To run the API collections in parallel I am using Node, Postman and the Newman library.
Here I have 2 API collections saved in a collections folder, each containing 5 API requests. When I run the collections, I see that all 5 requests run successfully, but the data is not updated in the backend.
var path = require('path'),
    async = require('async'),
    newman = require('newman');

// Run parameters for each collection; both share the same environment file.
var parametersForTestRun = {
    collection: path.join('./collections/', 'cc1'), // your collection
    environment: path.join('./', 'env.json'), //your env
};
var parametersForTestRun2 = {
    collection: path.join('./collections/', 'cc2'), // your collection
    environment: path.join('./', 'env.json'), //your env
};

// BUG FIXES:
//  - the two runner functions were implicit globals; declare them with `var`;
//  - the empty `setTimeout(function(){}, [4000])` calls did nothing (empty
//    callback, and the delay must be a number, not an array) and could not
//    delay newman.run anyway, since newman.run had already started — removed.
var parallelCollectionRun = function (done) {
    newman.run(parametersForTestRun, done);
};
var parallelCollectionRun2 = function (done) {
    newman.run(parametersForTestRun2, done);
};

// Runs the two Postman collections in parallel.
async.parallel([
    parallelCollectionRun,
    parallelCollectionRun2
],
function (err, results) {
    // BUG FIX: bail out on error — `results` may be undefined here, so the
    // original forEach could throw.
    if (err) {
        console.error(err);
        return;
    }
    results.forEach(function (result) {
        var failures = result.run.failures;
        // BUG FIX: `failures` is already the failures array;
        // `failures.failures` was undefined in the original.
        console.info(failures.length ? JSON.stringify(failures, null, 2) :
            `${result.collection.name} ran successfully.`);
    });
});
Related
I want to create an uptime monitor using NodeJS and MongoDB. I want to run a cron job in NodeJS and store the data in MongoDB. If the website response status code is not equal to 200, it will be saved in the database. I want to make a database entry like this:
url : http://www.google.com
status_code : 500
start_time :- start time
end_time :- end time
I can run the cron job but I am not sure how to save the downtime in the database, as I don't want to store every response in the database. Only when the response status code is other than 200 should it start tracking (start_time) the URL, and it should record the time when the website is back to 200 as end_time.
cron.js :-
var async=require('async');
const Entry = require('../models/health.model.js');
var https = require('https');
var request = require('request');
module.exports = function getHttpsRequests () {
Entry.find({},function(err,entrys){
console.log(err);
if(!err && entrys){
async.each(entrys,function(entry,callback){
request(entry.url, function (error, response, body) {
entry.statuscheck=response.statusCode;
entry.save();
callback();
});
},function (error) {
});
}
});
}
health.model.js :-
const mongoose = require('mongoose');

// Health-check entry: one document per monitored URL.
const EntrySchema = mongoose.Schema(
    {
        url: String,         // the URL being monitored
        statuscheck: String  // last observed HTTP status code
    },
    {
        timestamps: true     // adds createdAt / updatedAt automatically
    }
);

module.exports = mongoose.model('Entry', EntrySchema);
I would do something like this to handle updating the database. I went ahead and put standard arrow functions in, because it was easier for me that way. I put some comments in so that should clear most questions up. It may not be the most elegant solution because I wrote it in 5 minutes, but if you follow this general logic flow, you should be much closer to your solution (its completely untested mind you.)
var async=require('async');
const Entry = require('../models/health.model.js');
var https = require('https');
var request = require('request');
module.exports = function getHttpsRequests () {
Entry.find({}, (err,entrys) => {
console.log(err);
if (!err && entrys) {
async.each(entrys, (entry,callback) => {
request(entry.url, (error, response, body) => {
//first check if the url has a document in the db.
Entry.find({ url: entry.url }, (err, entry) => {
if(!entry) {
//since the document does not exist, check the statusCode.
if(response.statusCode===200) { //if the statusCode is 200, continue the loop.
callback();
} else { //if the status code is not 200, lets save this to the db.
console.log("Saving object: " + entry)
entry.status_code = response.statusCode;
entry.start_time = new Date();
entry.save();
callback();
}
} else if (entry) {
//since the document exists, lets check the statusCode.
if(response.statusCode===200) { //if the statusCode is 200, update the stop_time.
entry.end_time = new Date();
Entry.findOneAndUpdate({ url: entry.url }, entry, (err, object) => { //this returns the entry after update, so we can put that in the console for easy debug.
if (err) {
console.log(err);
callback();
} else {
console.log("Object saved: " + object);
callback();
}
});
}
} else { //there was an error finding the document in the db, just go to the next one.
callback();
});
});
});
}
});
}
What is the best way to have a single list of users which is shared between several processes? The processes are initiated using PM2.
The processes will have access to the list in order to add, remove, and check whether a user already exists in the list.
The easiest way is to use redis (or memcached, or even mongodb) to store that user list.
Otherwise you will have to handle very complex IPC in your case, since pm2 uses the Node cluster module, which is based on child_process.
You can use an in-memory data store like Redis.
Redis runs as a separate process and serves requests on a TCP port(by default 6379). Redis is a key-value data store and can be used by all your node processes.
Here's how you can do it:
List item
Install redis. (https://redis.io/)
Install node client for redis:
npm install --save redis
Now you can use redis to store your application state data and share it across processes.
Refer this link for code example.
I just wrote a job-tracking logger for a large web crawler system with up to 1200 instances, using redis.
Ok! Let's do it!
First you will need define it:
const redis = require("redis");

// Redis client with a custom reconnect policy.
const client_redis = redis.createClient({
    retry_strategy: function (options) {
        // Give up immediately if the server actively refused the connection.
        const refused = options.error && options.error.code === "ECONNREFUSED";
        if (refused) {
            // Flush all pending commands with an individual error.
            return new Error("The server refused the connection");
        }
        // Give up after one hour of total retrying.
        const ONE_HOUR_MS = 1000 * 60 * 60;
        if (options.total_retry_time > ONE_HOUR_MS) {
            return new Error("Retry time exhausted");
        }
        // After ten attempts, stop with the client's built-in error.
        if (options.attempt > 10) {
            return undefined;
        }
        // Otherwise back off linearly, capped at 3 seconds.
        return Math.min(options.attempt * 100, 3000);
    },
});
This function for update and create log.
/**
 * Creates or updates the redis log entry for one product/url pair.
 * @param productName2 - product name used to derive the key
 * @param url2 - product url used to derive the key
 * @param proc - id/name of the process reporting
 * @param msg - current task description
 */
function create_and_update_log(productName2, url2, proc, msg) {
    // Hoisted: the md5-based key was previously computed twice.
    var key = 'BESTBUY::DATA_LOG::' + md5(productName2 + url2);
    var data_value = {
        id: key,
        totalrv: 'WAIT',
        product: productName2,
        url: url2,
        process: proc,
        task: msg,
        timestamp: moment().format('DD/MM/YYYY HH:mm:ss')
    };
    client_redis.set(key, JSON.stringify(data_value));
}
This function for query all data
/**
 * Fetches every log entry whose key matches the BESTBUY::DATA_LOG:: prefix.
 * @return Promise resolving to { logger: [...] }.
 *
 * BUG FIXES vs. the original:
 *  - the promise never settled when there were no matching keys;
 *  - it resolved when the *last-indexed* GET returned, but GETs complete in
 *    any order, so results could be dropped — count completions instead;
 *  - KEYS/GET errors were silently ignored.
 */
async function get_log_redis() {
    return new Promise(function (resolve, reject) {
        var logger_data = {
            logger: []
        };
        client_redis.keys('BESTBUY::DATA_LOG::*', function (err, replies) {
            if (err) {
                return reject(err);
            }
            // Resolve immediately on an empty key set.
            if (!replies || replies.length === 0) {
                return resolve(logger_data);
            }
            var pending = replies.length;
            replies.forEach(function (reply) {
                client_redis.get(reply, function (getErr, data) {
                    if (!getErr && data && !data.includes("Total reviews left: 0")) {
                        logger_data.logger.push(JSON.parse(data));
                    }
                    // Resolve only once every GET has come back.
                    if (--pending === 0) {
                        resolve(logger_data);
                    }
                });
            });
        });
    });
}
Remember to replace :
BESTBUY::DATA_LOG::
... with what you want to define.
And the final is how to fetch all log belong to my key name begin with "BESTBUY::DATA_LOG::"
// Fetch every log entry and emit it as the JSON response.
// BUG FIX: removed the outer `var log_obj_data = "";` — it was immediately
// shadowed by the declaration inside the IIFE and never used.
(async () => {
    try {
        var log_obj_data = await get_log_redis();
        response.writeHead(200, {
            "Content-Type": "application/json"
        });
        response.end(JSON.stringify(log_obj_data));
    } catch (err) {
        // ROBUSTNESS: the original left a failed promise as an unhandled
        // rejection and the HTTP request hanging.
        console.log(err);
        response.writeHead(500, {
            "Content-Type": "application/json"
        });
        response.end(JSON.stringify({ error: String(err) }));
    }
})();
I'm developing an application using Sails JS and Braintree. I'm trying to send all the past transaction details that the customer has made.
Here is my getTransaction action
// Fetches all Braintree transactions for one customer and returns the raw
// search response.
getTransaction: function(req, res) {
var customerId = req.param('customerId');
var gateway = setUpGateway();
// transaction.search takes a query-builder callback plus a result callback.
var stream = gateway.transaction.search(function(search) {
search.customerId().is(customerId);
}, function(err, response) {
if (err) {
return res.serverError(err);
}
// NOTE(review): serializing the raw Braintree response object throws a
// circular-structure error, which is why the second version below picks
// out individual fields instead.
res.send(response);
});
},
But the problem is, if I directly send the response which I got from braintree server, it throws the circular dependency error. So, to overcome that error I'm fetching only those details that I need from response like this
// Second attempt: collect only id/amount/createdAt for each transaction to
// avoid the circular-serialization problem of the raw response.
getTransaction: function(req, res) {
var customerId = req.param('customerId');
var gateway = setUpGateway();
var stream = gateway.transaction.search(function(search) {
search.customerId().is(customerId);
}, function(err, response) {
if (err) {
return res.serverError(err);
}
var transactions = [];
// NOTE(review): response.each iterates asynchronously, so the res.send
// below executes before any of these callbacks run — the array is still
// empty when it is sent. The stream-based answer below fixes this.
response.each(function(err, transaction) {
var temp = [];
temp.push(transaction.id);
temp.push(transaction.amount);
temp.push(transaction.createdAt);
transactions.push(temp);
});
res.send(transactions);
});
},
But here the .each function is getting executed asynchronously and hence res.send returns the empty array. So what should I do to return all the transaction that the user has made?
Full disclosure: I work at Braintree. If you have any further questions, feel free to contact our support team.
You are correct that the iterator executes asynchronously. You should use Node's stream semantics to process the request
getTransaction: function(req, res) {
var customerId = req.param('customerId');
var gateway = setUpGateway();
var transactions = [];
var stream = gateway.transaction.search(function(search) {
search.customerId().is(customerId);
});
stream.on('data', function (transaction) {
transactions.push(transaction);
});
stream.on('end', function () {
res.send(transactions);
});
},
This will wait until all transactions have been processed before sending the result.
This page provides more information about searching using our Node client library and Node's Stream API.
I want to make a HTTPS request to an external link through Node JS. On my first call, I need to fetch user id by looping through several users. On my second call, I need to input that user id in the URL link and fetch user properties. Keep repeating this process till I go through all users. The end goal is to store data of every user in a JSON format. There is no front-end involved. Any direction/advice is much appreciated.
I can't share the actual link due to api keys. But here is the hypothetical scenario. I only show 2 users here. I have about 10,000 users in my actual data set.
Link 1
https://www.google.com/all_users
JSON Output
{
"name": "joe",
"uri": "/id/UserObject/User/1234-1234",
},
{
"name": "matt",
"uri": "/id/UserObject/User/5678-5678",
}
Link 2
https://www.google.com//id/UserObject/User/1234-1234
JSON Output
{
"name": "joe",
"uri": "/id/UserObject/User/1234-1234",
"Property Values": {
"height": "2",
"location": "canada"
},
"Other Values": {
"work": "google",
"occupation": "developer"
}
}
Nested JSON
{
"PropertySetClassChildrenResponse": {
"PropertySetClassChildren": {
"PropertySetInstances": {
"totalCount": "1",
"Elements": [
{
"name": "SystemObject",
"uri": "/type/PropertySetClasses/SystemObject"
}
]
}
}
}
}
Not tested, but this should point you in the right direction. It uses Promises and assumes that run in an ES6 environment:
const rp = require('request-promise');
const Promise = require('bluebird');

// Fetch the user list, derive the per-user detail requests, and run them all.
fetchAllUsers()
    .then(extractUserUris)
    .then(extractUserIds)
    .then(buildUserDetailRequests)
    // BUG FIX: `.then(Promise.all)` passes `all` unbound (it loses its
    // `this`), which throws at runtime — wrap it in a function instead.
    .then(requests => Promise.all(requests)) // run all detail requests in parallel
    .then(allUserData => {
        // allUserData is an array of all users' data
    });
// GET the full user list.
function fetchAllUsers() {
    // BUG FIX: request-promise resolves with the raw body *string* by
    // default; `json: true` parses it so the later `.map(user => user.uri)`
    // receives objects.
    return rp({ uri: 'https://api.whatever.com/all_users', json: true });
}
// Pull the `uri` field out of each user record.
function extractUserUris(users) {
    const uris = [];
    for (const user of users) {
        uris.push(user.uri);
    }
    return uris;
}
// The user id is the last path segment of each user URI.
function extractUserIds(userUris) {
    return userUris.map(function (userUri) {
        const segments = userUri.split('/');
        return segments[segments.length - 1];
    });
}
// Build one detail request (a promise) per user id; nothing is awaited here.
function buildUserDetailRequests(userIds) {
    const detailRequests = [];
    userIds.forEach(function (userId) {
        detailRequests.push(rp("https://api.whatever.com/user/" + userId));
    });
    return detailRequests;
}
I'd suggest using the request package to make your HTTP requests easier.
> npm install request
Then you would obtain a list of all users with something like this:
var request = require('request');
// Fetch the full user list; handleUsersResponse (defined below) receives
// (err, response, body).
request.get({url: "https://example.org/all_users"}, handleUsersResponse);
You'd handle the request response like this:
// Handles the user-list response: parse the body and request each user's
// details in turn.
// BUG FIX: a function *statement* must have a name — and the request.get call
// above refers to it as `handleUsersResponse`.
function handleUsersResponse(err, response, body) {
    if (!err && response.statusCode == 200) {
        // parse json (assuming array of users)
        var users = JSON.parse(body);
        // iterate through each user and obtain user info
        for (var i = 0; i < users.length; i++) {
            var userUri = users[i].uri;
            obtainUserInfo(userUri);
        }
    }
}
obtainUserInfo function would be similar to the above code.
One important thing to keep in mind is that since the HTTP requests are being made asynchronously, when you make the requests in a loop, the next iteration of the loop does not wait until the work is finished before moving to the next iteration and starting the next request. So in effect, your loop would start all the HTTP requests nearly in parallel. This can easily overwhelm both your client and the server. One way to get around this is to use a worker queue to enqueue the work and ensure that only a maximum number of HTTP requests are being executed at any given time.
You don't want to do synchronous calls, it defeats the purpose of using Node. So by the Node powers invested in me by the State of Texas I hereby cast that synchronous way I thinking out of you!
Just kidding :), but let's do this the Node way.
Install these two libraries:
sudo npm install Promise
sudo npm install request
And set your code to look like:
var Promise = require('promise');
var request = require('request');

// Fetch all user data, then print it as JSON (or report the failure).
getUserData()
    .then(function (allUserData) {
        console.log(JSON.stringify(allUserData));
    })
    .catch(function (err) {
        console.log('Error: ' + err);
    });
/**
 * Prepares an Object containing data for all users.
 * @return Promise - Contains object with all user data.
 *
 * BUG FIXES vs. the original:
 *  - the `for` loop was never closed (syntax error);
 *  - `res.users.id` should index the current user (`res.users[x].id`);
 *  - `returnDataArr` was re-declared inside the loop, discarding results;
 *  - an empty user list left the promise unsettled.
 */
function getUserData() {
    return new Promise(function (fulfill, reject) {
        // Make the first request to get the user IDs:
        var url1 = 'https://www.google.com/all_users';
        get(url1)
            .then(function (res) {
                res = JSON.parse(res);
                var returnDataArr = [];
                var counter = 0; // counts completed detail requests
                if (!res.users || res.users.length === 0) {
                    return fulfill({ data: returnDataArr });
                }
                // Loop through the users, firing one detail request each.
                for (var x = 0; x < res.users.length; x++) {
                    var url2 = 'https://www.google.com//id/UserObject/User/';
                    url2 = url2 + res.users[x].id;
                    get(url2)
                        .then(function (res2) {
                            // Collect the response; fulfill once all are in.
                            returnDataArr.push(res2);
                            counter++;
                            if (counter === res.users.length) {
                                fulfill({ data: returnDataArr });
                            }
                        })
                        .catch(function (err) {
                            // Catch any errors from the 2nd HTTPS request:
                            reject('Error: ' + err);
                        });
                }
            })
            .catch(function (err) {
                // Catch any errors from the 1st HTTPS request:
                reject('Error: ' + err);
            });
    });
}
/**
 * Your HTTPS GET Request Function
 * @param url - The url to GET
 * @return Promise - Promise containing the JSON response.
 */
function get(url) {
    return new Promise(function (fulfill, reject) {
        // BUG FIX: the `options` object literal was never closed in the
        // original (only the nested `headers` object was), a syntax error.
        var options = {
            url: url,
            headers: {
                'Header Name': 'Header Value',
                'Accept': 'application/json',
                'Content-Type': 'application/json'
            }
        };
        request(options, function (err, res, body) {
            if (err) {
                reject(err);
            } else {
                fulfill(body);
            }
        });
    });
}
So what this Promise does, is that it returns the value once we actually have it. In the code above, we are first getting that list of users, and then as we parse through it, we are making a new asynchronous HTTP request to get the additional data on it. Once we get the user data, we push it to an array.
Finally, once our counter hits its endpoint, we know that we have gotten all the user data, and so we call fulfill which essentially means return, and it returns an object containing an array of the user data.
Let me know if this makes sense.
The answers above helped me go further with my solution and get the desired outcome. However, I spent a lot of time trying to understand node, promises in node, making an API call, etc. Hopefully, this will help to a beginner level node developer.
NODE
Node.js® is a JavaScript runtime built on Chrome's V8 JavaScript engine. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient. Node.js' package ecosystem, npm, is the largest ecosystem of open source libraries in the world.
If you are a JavaScript developer, you would prefer to use Node as you wouldn't have to spend time learning a new language like Java or Python.
GOAL
Make a HTTPS call to an external link to fetch all server URIs. Pass in the URI as a param to create a second link to fetch all server properties. Loop through to all server uris and properties. Refer the original post on the top for the data structure. The external link also required basic auth and headers.
CODE
Install NPM modules request (https call), bluebird (promises) and lodash(utility) and express(node framework).
/********************** MODULES/DEPENDENCIES **********************/
// BUG FIX: the comment opener above was split across two lines as a bare `/`
// followed by `*...*/`, which is a syntax error.
var express = require('express');
var request = require('request');
var Promise = require('bluebird');
var _ = require("lodash");

/********************** INITIATE APP **********************/
var app = express();
console.log("Starting node server...");
/**
 * Your HTTPS GET Request Function
 * @param url - The url to GET
 * @return Promise - Promise containing the JSON response.
 */
function get(url) {
    return new Promise(function (resolve, reject) {
        // var auth = "Basic " + new Buffer(username + ':' + password).toString("base64");
        var requestOptions = {
            url: url,
            headers: {
                // 'Authorization': auth,
                'Content-Type': 'application/json',
                'Accept': 'application/json'
            }
        };
        console.log("Calling GET: ", url);
        // In development only, tolerate self-signed / unverified TLS certs.
        if ('development' == app.get('env')) {
            console.log("Rejecting node tls");
            process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
        }
        request(requestOptions, function (err, response, body) {
            if (err) {
                reject(err);
                return;
            }
            // console.log("THIS IS BODY: ", body);
            resolve(body);
        });
    });
}
/********************** GET DATA FUNCTION **********************/
// Fetches the server list, then each server's property values in parallel.
// Returns a promise resolving to an array of per-server Elements.
function getServerData() {
/********************** URI VARIABLES **********************/
// NOTE(review): credentials are hard-coded placeholders — replace with real
// configuration before use.
var username = 'username',
password = 'password',
role = 'Read-Only',
url_host = 'https://link.com:10843';
/********************** URL 1 **********************/
var url1 = url_host + '/type/PropertySetClasses/SystemObject/Server/?maxResults=1000&username=' + username + '&password=' + password + '&role=' + role;
console.log("Getting server data...", url1);
/********************** GET REQUEST 1 **********************/
return get(url1)
.then(function(res) {
console.log("Got response!");
// get() resolves with the raw body string, so parse it here.
res = JSON.parse(res);
res = res.PropertySetClassChildrenResponse.PropertySetClassChildren.PropertySetInstances.Elements;
// console.log("THIS IS RES: ", res);
/********************** FETCH URI FROM RES NESTED OBJECT **********************/
var server_ids = _.map(res, function(server) {
return server.uri;
});
console.log("Calling server urls", server_ids);
// Loop through the object to get what you need:
// Set a counter though so we know once we are done.
// bluebird's Promise.map fires one GET per server uri in parallel and
// resolves once every mapped promise has resolved.
return Promise.map(server_ids, function (id) {
var url2 = url_host + id + '?username=' + username + '&password=' + password + '&role=' + role;
console.log("Calling URL", url2);
return get(url2)
.then(function(res2) {
res2 = JSON.parse(res2);
var elements = res2.PropertySetInstanceResponse.PropertySetInstance.PropertyValues.Elements;
console.log("Got second response", res2, elements);
return elements;
});
})
.then(function (allUrls) {
console.log("Got all URLS", allUrls);
return allUrls;
});
})
.catch(function(err) {
// Log, then rethrow so the route handler can report the failure.
console.error(err);
throw err;
});
};
// Boot the HTTP server, then register the /serverInfo route.
app.listen(8080, function() {
    console.log("Server listening and booted on: " + 8080);
    app.get("/serverInfo", function (req, res) {
        console.log("Calling server info");
        return getServerData()
            .then(function(userData) {
                // BUG FIX: `var userData = JSON.stringify(userData, ...)`
                // re-declared and shadowed the parameter; use a new name.
                var prettyUserData = JSON.stringify(userData, null, "\t");
                console.log("This is USERDATA Data: ", prettyUserData);
                res.send(prettyUserData);
            })
            .catch(function(err) {
                console.error(err);
                res.send({
                    __error: err,
                    message: err.message
                });
            });
    });
});
I'm trying to write at test for a Sails.Js controller action that downloads a user's avatar image. The controller action looks like this:
/**
* Download avatar of the user with the specified id
*
* (GET /user/:id/avatar)
*/
avatar: function (req, res) {
req.validate({
id: 'string'
});
User.findOne(req.param('id')).exec(function (err, user){
if (err) return res.negotiate(err);
if (!user) return res.notFound();
// User has no avatar image uploaded.
// (should have never have hit this endpoint and used the default image)
if (!user.avatarFd) {
return res.notFound();
}
var SkipperDisk = require('skipper-disk');
var fileAdapter = SkipperDisk(/* optional opts */);
// Stream the file down
fileAdapter.read(user.avatarFd)
.on('error', function (err){
return res.serverError(err);
})
.pipe(res);
});
}
So far the test looks like this:
describe('try to download a user avatar', function() {
    var result;

    it('should return 200', function(done) {
        server
            .get('/user/' + testUser.id + '/avatar')
            .expect(200)
            .end(function(err, res) {
                if (err) return done(err);
                result = res.body;
                return done();
            });
    // BUG FIX: the original closed this `it` with a bare `}` plus a stray
    // extra `}` instead of `});` — a syntax error.
    });

    it('should return binary data stream', function(done) {
        // make some assertions.
        // BUG FIX: the original never called done(), so this test would
        // always time out.
        done();
    });
});
I want to add another test to make sure that what has been returned is binary data, but I can't figure out how this would be done. Anyone know the right way to go about this?
UPDATE
After attempting a solution in the mode that #sgress454 suggested below, I ended up with this test:
// NOTE(review): `tmp` is assigned without var/const — an implicit global.
tmp = require('temporary');
// https://github.com/vesln/temporary
// Write nonsense bytes to a file fixture.
var EOF = '\x04';
var size = 1000;
var fileFixture = new tmp.File();
// NOTE(review): pseudoRandomBytes returns a Buffer; `+ EOF` coerces it to a
// string before writing, so the file does not contain the raw random bytes.
fileFixture.writeFileSync(crypto.pseudoRandomBytes(size) + EOF);
fileFixture.size = size;
// Remove the temp fixture once the suite is done.
after(function() {
fileFixture.unlinkSync();
});
describe('try to download a user avatar', function() {
var download;
it('should return 200', function(done) {
server
.get('/user/' + testUser.id + '/avatar')
.expect(200)
.end(function(err, res) {
if (err) return done(err);
// NOTE(review): res.text is an already-decoded string; rebuilding a
// Buffer from it with 'ascii' is lossy for binary data — likely the
// cause of the mismatch described below. TODO confirm against
// supertest's handling of binary bodies.
download = new Buffer(res.text, 'ascii');
return done();
});
});
it('should return binary stream', function(done) {
// Compare the downloaded bytes against the fixture on disk, base64-encoded.
var testAvatar = fs.readFileSync(fileFixture.path);
download.toString('base64').should.be.equal(testAvatar.toString('base64'));
done();
});
});
So this test mocks up a file using temporary. The trouble is that when I compare the result I get back from the server and the mock file that I'm reading from the mocked file system, they aren't the same. I get the following as expected:
+bEk277+9azo277+916jvv71g77+9KO+/vV/vv71577+977+9U8e9A++/ve+/vSgiF++/ve+/vWsi77+977+9BRRs77+977+977+9bO+/vSoGRW3vv73vv73dr++/ve+/vXEAIFbvv70p77+977+9WMuSSm/vv73vv71W77+977+9LULvv70J77+9eFfSoVsW77+977+9QAleLgDvv71T77+9de+/vRHvv71qyJPvv73vv73vv73vv73vv71S77+91L4sf++/vQHaiicDKXXvv71977+9NO+/vUzvv71YXe+/vTjvv70n77+9fWvvv73vv709YgoW77+9bmF/77+9JNK4LO+/vUNdNGjvv70TZMazS+2IjBdgL++/ve+/vRXvv71S77+977+9SHHvv70QY++/vSbvv70SC2US77+9eGnvv71cczVOFBp7fu+/ve+/ve+/ve+/vWTvv70B77+9cm/vv73vv73vv73vv70q77+977+9JSxY77+9TO+/vQbvv73vv71sREl+Qyrvv70JFXgSHBLvv71v77+977+9AkBPOwvvv73vv73vv71R77+9VSHvv71DZ2NB77+977+977+977+9Pu+/ve+/vcabMe+/ve+/ve+/ve+/vUFnXFJP77+977+977+977+9G1/vv73vv73vv71OQe+/ve+/vdmU77+9B++/vUts77+9Zu+/vS9uUH3vv73vv73vv71y77+9PlRXSSJ3UHHvv73vv71SBXvvv73vv70677+977+9dk5O77+9de+/vTzvv70Y77+9cmjvv73vv73vv73vv712UNC0WW7vv73vv71lZD4+77+9U++/vR4MNW8RY37vv70ZTUo2fl0sKu+/vUN/bipyPO+/vSrvv73vv73vv700Bjwa77+977+9RH8A77+977+977+9zrDvv73vv70JQ2tOKe+/vV7Mk2Hvv73vv73vv70L77+9Tu+/vQwPK++/ve+/ve+/vVTvv73vv70M77+977+9Zs2/Vu+/vXzvv73vv73vv71a77+977+977+9Au+/vSrvv73vv70S77+977+9eO+/ve+/vVFk77+977+9Jm4L77+977+9fVnRl05x77+9ai1SSDZiX2fvv73vv73vv73vv73vv73vv73vv73vv71Y77+977+977+9VFvvv71B77+9X++/vTbvv70w77+977+9TO+/vSQBMB4+77+977+9Z++/vTDvv73vv71/77+977+9Dnd9Be+/vUZCehYuFu+/vVfvv73vv73vv73vv73vv73vv70+HO+/ve+/ve+/ve+/ve+/vSgLVitLQO+/ve+/vUZP77+977+977+9adWy77+977+9H++/ve+/vWTvv71677+93Zzvv73vv73vv71t77+977+977+9BGMkIFkYSxRW77+977+977+9Ke+/vRoN77+9f9CIUXQXWu+/vSYp77+9VDPvv71fLAxU77+977+977+9N++/vTbvv73vv73vv71dIjzvv73vv73vv71Z77+977+9He+/ve+/vWd6LO+/vQDvv70Bae+/vRQZ77+977+90YLvv717Ji3vv716Bu+/ve+/ve+/vVpU77+9aO+/ve+/vWnvv73vv70u2a/vv73vv73vv71p77+9WiAh77+9JyLvv73vv73vv73vv71QIUzvv71pypRO77+9Fe+/vQ7vv70Z77+9Se+/vUHvv73vv73vv70tA++/vSjvv73vv73vv73vv716K8e677+977+977+977+9Zyjvv73vv71U77+9Oe+/vRcF77+9Ku+/ve+/ve+/ve+/vVl777+9ewUAUu+/ve+/ve+/vUV/GGA6fu+/ve+/vVfvv705BA50D++/vSrvv73vv73vv71d77+977+977+977+9KO+/ve+/vUBzbO+/ve+/ve+/ve+/vXUnPS7vv71gCe+/vQ/vv70d77+9P00d77+9Tx8cOz8ABe+/vRbvv70t77+9IO+/ve+/ve+
/ve+/ve+/ve+/vSQt77+9GE7vv73vv73vv73vv73So++/vVTvv71BEgDvv73vv70BdRYeTO+/vTjvv71+Ku+/vXjTu++/ve+/ve+/vRQK77+9Su+/vTvskJB/b1dyU++/ve+/vW7vv71k77+9Pu+/ve+/ve+/ve+/ve+/vVk277+9Pyfvv73vv73vv70mXO+/ve+/ve+/ve+/ve+/vQIr77+9QO+/vS1nAyXvv73vv713Ve+/vVTvv70VcV5m77+9M++/ve+/ve+/vWUx77+9OT1g77+9MQnvv71N77+977+977+9byjvv73vv71W77+977+9x5rvv70PBO+/ve+/ve+/ve+/ve+/ve+/ve+/vQd0Ru+/ve+/vU1zG++/vW5W77+977+9ES9udy3vv71CbGpVDgXvv71977+977+9QhLvv71xfnEN77+9KzDvv70KKO+/vVDvv70E
And the following as the actual response:
-bEk2/Ws6Nv3o/WD9KP1f/Xn9/VP9A/39KCIX/f1rIv39BRRs/f39bP0qBkVt/f1v/f1xACBW/Sn9/VjSSm/9/Vb9/S1C/Qn9eFehWxb9/UAJXi4A/VP9df0R/WoT/f39/f1S/T4sf/0BiicDKXX9ff00/Uz9WF39OP0n/X1r/f09YgoW/W5hf/0kuCz9Q100aP0TZLNLDBdgL/39Ff1S/f1Icf0QY/0m/RILZRL9eGn9XHM1ThQae379/f39ZP0B/XJv/f39/Sr9/SUsWP1M/Qb9/WxESX5DKv0JFXgSHBL9b/39AkBPOwv9/f1R/VUh/UNnY0H9/f39Pv39mzH9/f39QWdcUk/9/f39G1/9/f1OQf39VP0H/Uts/Wb9L25Qff39/XL9PlRXSSJ3UHH9/VIFe/39Ov39dk5O/XX9PP0Y/XJo/f39/XZQNFlu/f1lZD4+/VP9Hgw1bxFjfv0ZTUo2fl0sKv1Df24qcjz9Kv39/TQGPBr9/UR/AP39/bD9/QlDa04p/V4TYf39/Qv9Tv0MDyv9/f1U/f0M/f1mf1b9fP39/Vr9/f0C/Sr9/RL9/Xj9/VFk/f0mbgv9/X1ZV05x/WotUkg2Yl9n/f39/f39/f1Y/f39VFv9Qf1f/Tb9MP39TP0kATAePv39Z/0w/f1//f0Od30F/UZCehYuFv1X/f39/f39Phz9/f39/SgLVitLQP39Rk/9/f1pcv39H/39ZP16/Vz9/f1t/f39BGMkIFkYSxRW/f39Kf0aDf1/CFF0F1r9Jin9VDP9XywMVP39/Tf9Nv39/V0iPP39/Vn9/R39/Wd6LP0A/QFp/RQZ/f1C/XsmLf16Bv39/VpU/Wj9/Wn9/S5v/f39af1aICH9JyL9/f39UCFM/WmUTv0V/Q79Gf1J/UH9/f0tA/0o/f39/Xor+v39/f1nKP39VP05/RcF/Sr9/f39WXv9ewUAUv39/UV/GGA6fv39V/05BA50D/0q/f39Xf39/f0o/f1Ac2z9/f39dSc9Lv1gCf0P/R39P00d/U8fHDs/AAX9Fv0t/SD9/f39/f0kLf0YTv39/f2j/VT9QRIA/f0BdRYeTP04/X4q/Xj7/f39FAr9Sv07EH9vV3JT/f1u/WT9Pv39/f39WTb9Pyf9/f0mXP39/f39Aiv9QP0tZwMl/f13Vf1U/RVxXmb9M/39/WUx/Tk9YP0xCf1N/f39byj9/Vb9/dr9DwT9/f39/f39B3RG/f1Ncxv9blb9/REvbnct/UJsalUOBf19/f1CEv1xfnEN/Ssw/Qoo/VD9BA==
I'm not sure why the files would be different at this point. Perhaps the problem is in the way I'm parsing the data that comes back?
This is one of those cases where the most obvious solution is the best: use the controller action to download a known file in your test, then load that same file from disk within the test and compare it to the one that was downloaded. You'll save an avatar file in your test directory somewhere (perhaps under a fixtures subdirectory), and make sure that before your test runs, a user is created whose avatarFd points to that file. Then, the easiest (least efficient) way to do the second test is to just leave your first test as-is, and re-run the request:
// Re-runs the avatar request and compares the body against the fixture file.
it('should return binary data stream', function(done) {
    server
        .get('/user/' + testUser.id + '/avatar')
        .end(function(err, res) {
            if (err) return done(err);
            result = res.text;
            var testAvatar = require('fs').readFileSync(pathToTestAvatar);
            // BUG FIX: the original was missing the closing parenthesis on
            // this assert.equal call (a syntax error).
            assert.equal(testAvatar.toString(), result.toString());
            return done();
        });
});
When I want to run separate tests on the result of a request, I tend to do the request itself in a before function, then save the body and statusCode in closure-scoped variables as you started to do here. Then you can run individual it tests for things like the status code, body contents, etc. So my whole test would look something like:
var assert = require('assert');
var path = require('path');
var server = require('supertest');
var fs = require('fs');
// Full suite: make the request once in `before`, then run synchronous
// assertions against the captured status and body.
describe('try to download a user avatar', function() {
var result, status;
// The test user's avatarFd points at this on-disk fixture.
var testAvatarFd = path.resolve(__dirname, '..', 'fixtures', 'test.jpeg');
// Create the user and make the request before running the actual tests.
before(function(done) {
// Create a test user
User.create({
avatarFd: testAvatarFd
}).exec(function(err, testUser) {
if (err) {return done(err);}
// Then request the user avatar from the server
server(sails.hooks.http.app)
.get('/user/' + testUser.id + '/avatar')
.end(function(err, res) {
if (err) return done(err);
// Save the result in our closure-scoped variables
result = res.text;
status = res.statusCode;
return done();
});
});
});
// The status code test is now synchronous
it('should return 200', function() {
assert.equal(status, 200);
});
// As is the file test, since we're using `readFileSync`
it('should return binary data stream', function() {
var testAvatar = fs.readFileSync(testAvatarFd);
assert.equal(testAvatar.toString(), result.toString());
});
});