Node.js - Check if stream has error before piping response

In Node.js, say that I want to read a file from somewhere and stream the response (e.g., from the filesystem using fs.createReadStream()).
application.get('/files/:id', function (request, response) {
    var readStream = fs.createReadStream('/saved-files/' + request.params.id);
    var mimeType = getMimeTypeSomehow(request.params.id);
    if (mimeType === 'application/pdf') {
        response.set('Content-Range', ...);
        response.status(206);
    } else {
        response.status(200);
    }
    readStream.pipe(response);
});
However, I want to detect if there is an error with the stream before sending my response headers. How do I do that?
Pseudocode:
application.get('/files/:id', function (request, response) {
    var readStream = fs.createReadStream('/saved-files/' + request.params.id);
    readStream.on('ready', function () {
        var mimeType = getMimeTypeSomehow(request.params.id);
        if (mimeType === 'application/pdf') {
            response.set('Content-Range', ...);
            response.status(206);
        } else {
            response.status(200);
        }
        readStream.pipe(response);
    });
    readStream.on('error', function () {
        response.status(404).end();
    });
});

The write stream is ended automatically when the read stream ends or emits an error. You can prevent this default behaviour by passing end: false to pipe() and ending the write stream manually.
So even if an error occurs, your write stream is still open, and you can do other things with it (e.g. send a 404 status) in the error callback.
var readStream = fs.createReadStream('/saved-files/' + req.params.id);
readStream.on('error', function () {
    res.status(404).end();
});
readStream.on('end', function () {
    res.end(); // end the write stream manually when the read stream ends
});
readStream.pipe(res, { end: false }); // prevent the default auto-end behaviour
Update 1: For file streams, you can listen for the 'open' event to check whether the file is ready to read:
readStream.on('open', function () {
    // set response headers and status
});
Update 2: As the OP mentioned, other streams may not emit an open event, but the following works for any stream that inherits from Node's stream module. The trick is to write the data manually instead of using pipe(); that way we can do some 'initialization' on the writable before the first byte is written.
We bind once('data') first and then bind on('data'). The first handler is called once, before any actual writing happens.
readStream
    .on('error', function (err) {
        res.status(404).end();
    })
    .once('data', function () {
        // called once, before the on('data') callback below,
        // so it's safe to set headers here
        res.set('Content-Type', 'text/html');
    })
    .on('data', function (chunk) {
        // now start writing data
        res.write(chunk);
    })
    .on('end', res.end.bind(res)); // end the writable when the readable ends

Related

Convert raw buffer data to a JPEG image

This question is a follow-up to this answer. I'm asking it as a question because I don't have enough reputation to comment.
I am trying to write an Android app that takes an image and sends it to a Node.js server.
I found this code in the post linked above, but it leaves me with a raw buffer of data. How can I save the contents of this buffer as a JPEG image?
app.post('/upload-image', rawBody, function (req, res) {
    if (req.rawBody && req.bodyLength > 0) {
        // TODO save image (req.rawBody) somewhere
        // send some content as JSON
        res.send(200, {
            status: 'OK'
        });
    } else {
        res.send(500);
    }
});

function rawBody(req, res, next) {
    var chunks = [];
    req.on('data', function (chunk) {
        chunks.push(chunk);
    });
    req.on('end', function () {
        var buffer = Buffer.concat(chunks);
        req.bodyLength = buffer.length;
        req.rawBody = buffer;
        next();
    });
    req.on('error', function (err) {
        console.log(err);
        res.status(500);
    });
}
You can simply use fs.writeFile.
Pass the file path as the first argument, your buffer as the second, and a callback to run once the file has been written.
Alternatively, use fs.writeFileSync, the synchronous version of writeFile, which doesn't take a callback.
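For example, the TODO in the handler above could be filled in roughly like this (the ./uploads directory and the timestamp-based filename are just an illustration, not part of the original code):

var fs = require('fs');

// inside the handler, replacing the TODO:
fs.writeFile('./uploads/' + Date.now() + '.jpg', req.rawBody, function (err) {
    if (err) return res.send(500);
    // send some content as JSON once the image is on disk
    res.send(200, { status: 'OK' });
});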

Buffer not recognized as buffer on nodejs with sails js

I'm trying to get the buffer from a blob being sent to my SailsJs server.
An example of what is being sent to the server is this:
Blob(3355) {size: 3355, type: "video/x-matroska;codecs=avc1,opus"}
Once on the server side, I do the following:
let body = new Array(0);
let buffer;
let readable = req.file('recordingPart');
readable.on('data', (chunk) => {
    body.push(new Buffer(chunk));
});
readable.on('end', () => {
    buffer = Buffer.concat(body);
    console.log('There will be no more data.', buffer.length, buffer);
});
When running this part of the code I get the error:
buffer.js:226
throw new errors.TypeError(
^
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be one of type string, buffer, arrayBuffer, array, or array-like object. Received type object
at Function.from (buffer.js:226:9)
at new Buffer (buffer.js:174:17)
...
In this case the error is thrown at body.push(new Buffer(chunk)), specifically by new Buffer(chunk).
My first approach was similar:
let body = [];
let buffer;
let readable = req.file('recordingPart');
readable.on('data', (chunk) => {
    body.push(chunk);
});
readable.on('end', () => {
    buffer = Buffer.concat(body);
    console.log('There will be no more data.', buffer.length, buffer);
});
but I got this error:
buffer.js:475
throw kConcatErr;
^
TypeError [ERR_INVALID_ARG_TYPE]: The "list" argument must be one of type array, buffer, or uint8Array
at buffer.js:450:20
In this one the error occurs at Buffer.concat(body).
I got some guidance from this answer: Node.js: How to read a stream into a buffer?
Can anyone help me get the buffer from that req.file?
You can get the uploaded files as below:
req.file('recordingPart').upload(function (err, uploadedFiles) {
    if (err) return res.serverError(err);
    // Logic with uploadedFiles goes here
});
You can get the file descriptor from uploadedFiles[0].fd and use it to read or stream the file as below. For binary data such as this video blob, omit the encoding argument so that data arrives as a Buffer; decoding binary data as 'utf8' would corrupt it.
fs.readFile(uploadedFiles[0].fd, function (err, data) {
    // `data` is already a Buffer when no encoding is given
    var myBuffer = data;
});
To use fs as above, require it first:
var fs = require('fs');
Your current upload approach will work, but there's a newer way you might want to consider:
// Upload the image.
var info = await sails.uploadOne(photo, {
    maxBytes: 3000000
})
// Note: E_EXCEEDS_UPLOAD_LIMIT is the error code for exceeding
// `maxBytes` for both skipper-disk and skipper-s3.
.intercept('E_EXCEEDS_UPLOAD_LIMIT', 'tooBig')
.intercept((err) => new Error('The photo upload failed: ' + util.inspect(err)));
Full example here.
Also check out the Sails.js Platzi course for video tutorials on this latest upload functionality, using the example project Ration.

Fetch vs Request

I'm trying to use fetch to consume a JSON stream. The stream emits some data every few seconds, but fetch only gives me access to the data when the stream is closed server-side. For example:
var target; // the url.
var options = {
    method: "POST",
    body: bodyString,
};
var drain = function (response) {
    // hit only when the stream is killed server side.
    // response.body is always undefined. Can't use the reader it provides.
    return response.text(); // or response.json();
};
var listenStream = fetch(target, options).then(drain).then(console.log).catch(console.log);
/*
logs data to the console with a 200 code only when the server stream has been killed.
*/
However, several chunks of data have already been sent to the client by that point.
Using a Node-inspired method in the browser like this works every single time an event is sent:
var request = require('request');
var JSONStream = require('JSONStream');
var es = require('event-stream');

request(options)
    .pipe(JSONStream.parse('*'))
    .pipe(es.map(function (message) { // pipe catches each fully formed message
        console.log(message);
    }));
What am I missing? My instinct tells me that fetch should be able to mimic the pipe or stream functionality.
response.body gives you access to the response as a stream. To read a stream:
fetch(url).then(response => {
    const reader = response.body.getReader();
    reader.read().then(function process(result) {
        if (result.done) return;
        console.log(`Received a ${result.value.length} byte chunk of data`);
        return reader.read().then(process);
    }).then(() => {
        console.log('All done!');
    });
});
Here's a working example of the above.
Fetch streams are more memory-efficient than XHR, as the full response doesn't buffer in memory, and result.value is a Uint8Array, which makes it far more useful for binary data. If you want text, you can use TextDecoder:
fetch(url).then(response => {
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    reader.read().then(function process(result) {
        if (result.done) return;
        const text = decoder.decode(result.value, { stream: true });
        console.log(text);
        return reader.read().then(process);
    }).then(() => {
        console.log('All done!');
    });
});
Here's a working example of the above.
Soon TextDecoder will become a transform stream, allowing you to do response.body.pipeThrough(new TextDecoder()), which is much simpler and allows the browser to optimise.
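For reference, this eventually shipped in browsers as TextDecoderStream; here's a rough sketch of the same read loop on top of it, assuming a browser that supports that API:

fetch(url).then(response => {
    // pipeThrough converts the byte stream into a text stream
    const reader = response.body
        .pipeThrough(new TextDecoderStream())
        .getReader();
    return reader.read().then(function process(result) {
        if (result.done) return;
        console.log(result.value); // already-decoded text
        return reader.read().then(process);
    });
});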
As for your JSON case, streaming JSON parsers can be a little big and complicated. If you're in control of the data source, consider a format that's chunks of JSON separated by newlines. This is really easy to parse, and leans on the browser's JSON parser for most of the work. Here's a working demo, the benefits can be seen at slower connection speeds.
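For instance, here's a rough sketch of consuming newline-delimited JSON with the reader pattern from above (assuming the server sends one complete JSON object per line):

fetch(url).then(response => {
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffered = '';
    return reader.read().then(function process(result) {
        if (result.done) {
            if (buffered.trim()) console.log(JSON.parse(buffered)); // flush the last line
            return;
        }
        buffered += decoder.decode(result.value, { stream: true });
        const lines = buffered.split('\n');
        buffered = lines.pop(); // keep any partial trailing line for the next chunk
        lines.filter(line => line.trim()).forEach(line => {
            console.log(JSON.parse(line)); // one fully formed message per line
        });
        return reader.read().then(process);
    });
});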
I've also written an intro to web streams, which includes their use from within a service worker. You may also be interested in a fun hack that uses JavaScript template literals to create streaming templates.
Turns out I could get XHR to work - which doesn't really answer the request vs. fetch question. It took a few tries and the right ordering of operations to get it right. Here's the abstracted code. #jaromanda was right.
var _tryXhr = function (target, data) {
    console.log(target, data);
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function () {
        console.log("state change.. state: " + this.readyState);
        console.log(this.responseText);
        if (this.readyState === 4) {
            // gets hit on completion.
        }
        if (this.readyState === 3) {
            // gets hit on each new event
        }
    };
    xhr.open("POST", target);
    xhr.setRequestHeader("cache-control", "no-cache");
    xhr.setRequestHeader("Content-Type", "application/json");
    xhr.send(data);
};

Function to check if externally hosted text file contains string acting slow - Node

I have the following code, which will retrieve a text file from an external server, and search the file for a specific string.
The function:
function checkStringExistsInFile(String, cb) {
    var opt = {
        host: 'site.com',
        port: 80,
        path: '/directory/data.txt',
        method: 'GET',
    };
    var request = http.request(opt, function (response) {
        response
            .on('data', function (data) {
                var string = data + "";
                var result = ((string.indexOf(" " + String + " ") != -1) ? true : false);
                cb(null, result);
            })
            .on('error', function (e) {
                console.log("-ERR: Can't get file. " + JSON.stringify(e));
                if (cb) cb(e);
            })
            .on('end', function () {
                console.log("+INF: End of request");
            });
    });
    request.end();
}
And this is where I call the function, and do something with the results.
checkStringExistsInFile(String, function (err, result) {
    if (!err) {
        if (result) {
            // there was a result
        } else {
            // string not present in file
        }
    } else {
        // error occurred
    }
});
This worked great in the beginning (with a small text file), but my text file is getting larger (4000+ characters) and it isn't working anymore.
What can I do to solve this? Should I temporarily save the file on my server first, or should I read the file as a stream?
It would be appreciated if you could support your answer with a relevant example. Thanks in advance!
Documentation:
If you attach a data event listener, then it will switch the stream into flowing mode, and data will be passed to your handler as soon as it is available.
If you just want to get all the data out of the stream as fast as possible, this is the best way to do so.
http://nodejs.org/api/stream.html#stream_event_data
The 'data' event is emitted as soon as data arrives, even if the stream has not completely loaded. So your code only looks for your string in the first chunk of the stream, then calls back.
What to do?
Your function should only call the callback on the 'end' event, or as soon as it finds something.
function checkStringExistsInFile(String, cb) {
    var opt = {
        host: 'site.com',
        port: 80,
        path: '/directory/data.txt',
        method: 'GET',
    };
    var request = http.request(opt, function (response) {
        var result = false;
        response
            .on('data', function (data) {
                if (!result) {
                    // note: a match split across two chunks will be missed;
                    // accumulate chunks if that matters for your data
                    var string = data + "";
                    result = (string.indexOf(" " + String + " ") != -1);
                    if (result) cb(null, result); // found: call back immediately
                }
            })
            .on('error', function (e) {
                console.log("-ERR: Can't get file. " + JSON.stringify(e));
                if (cb) cb(e);
            })
            .on('end', function () {
                console.log("+INF: End of request");
                if (!result) cb(null, result); // not found: call back exactly once at the end
            });
    });
    request.end();
}

How to pass object parameters to functions in JavaScript

My server.js is
// server.js - the outer server loop
var http = require('http')
  , php = require("./phpServer");

function start() {
    function onRequest(request, response) {
        php.phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
        response.write('Ending');
        response.end();
    }
    http.createServer(onRequest).listen(80);
    console.log("Server started.");
}
exports.start = start;
That calls php.phpServer on every request, with response as the third parameter.
phpServer contains:
//
// phpServer.js - a generic server to serve static files and
//
var fs = require('fs')
  , pathfuncs = require('path')
  , url = require('url')
  , mimetypes = require('./mimetypes');

function phpServer(root, request, response) {
    // serve static or pass to php.
    var data = url.parse(request.url);
    var ext = pathfuncs.extname(data.pathname);
    fs.stat(root + request.url, function (err, stat) {
        if (err || !stat.isFile()) { // error or not a file.
            console.log('404');
            response.writeHead(404);
            response.write('Not Found');
            return;
        }
        // exists - serve.
        console.log("serve(" + root + request.url + ", mimetypes.mimetype(" + ext + "))");
        response.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        response.write('Something to serve');
        // fs.createReadStream(root + request.url).pipe(response);
    });
}
exports.phpServer = phpServer;
As I see it, response is an object and is passed by reference, so the response.write() here should write to the response.
It doesn't. response here is NOT the same as response in onRequest, so nothing in phpServer is sent to the browser - neither code nor content.
The console.logs come out and show what I would expect.
How can I get the response object passed in so that I can call write() on it?
------------- added later -------------------
I've tried to apply the answers given, and the code for server.js is now:
// server.js - the outer server loop
var http = require('http')
  , fs = require('fs')
  , pathfuncs = require('path')
  , url = require('url')
  , mimetypes = require('./mimetypes');

function phpServer(root, request, res) {
    // code adapted from page 118 of Smashing Node.js by Guillermo Rauch
    // res is the response provided to onRequest.
    var data = url.parse(request.url);
    var ext = pathfuncs.extname(data.pathname);
    res.write('Start reply');
    fs.stat(root + request.url, function (err, stat) {
        // delayed callback - response in scope
        if (err || !stat.isFile()) { // error or not a file.
            console.log('404');
            res.writeHead(404);
            res.write('Not Found');
            res.end
            return;
        };
        // exists so serve.
        console.log("serve(" + root + request.url + ", mimetypes.mimetype(" + ext + "))");
        res.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        res.write('The file contents');
        res.end;
    } // end callback,
    ); // end fs.stat call.
} // end phpServer

function start() {
    function onRequest(request, response) {
        phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
    }
    http.createServer(onRequest).listen(80);
    console.log("Server started.");
}
exports.start = start;
This does not reply at all - it times out. However, the call to res.writeHead will either fail (if res is out of scope, does not exist, or is undefined) or succeed (if res is the param passed in).
It succeeds, and is followed by write and end, so please - what have I got wrong?
If the file does not exist I get 'Start reply' and then a timeout.
At res.write('Start reply'), res is the response param, yet it isn't later in the fs.stat callback.
Why not?
Damn - this is frustrating.
The call to response.end should be moved from the onRequest function into phpServer. As it stands, phpServer cannot write anything else, because the stream has already been closed:
function onRequest(request, response) {
    php.phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
    // response.end(); // move this to phpServer
}
As explained in the documentation for response.end:
This method signals to the server that all of the response headers and body have been sent; that server should consider this message complete.
Your problem is not with parameter passing; it's with basic asynchronous control flow. The stat() function does not do its work immediately: its callback parameter is called when it's done. You basically cannot structure the code the way you've done it. Instead, your phpServer code will need to take a callback parameter of its own, and call it after it does its work.
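A minimal sketch of that restructuring, with a hypothetical done callback (the parameter name and the placeholder response bodies are illustrative, not from the original code):

// phpServer takes a callback of its own and invokes it only after
// the asynchronous fs.stat work has finished writing the response.
function phpServer(root, request, response, done) {
    fs.stat(root + request.url, function (err, stat) {
        if (err || !stat.isFile()) {
            response.writeHead(404);
            response.write('Not Found');
            return done();
        }
        response.writeHead(200, {'Content-Type': 'text/html'});
        response.write('The file contents');
        done();
    });
}

function onRequest(request, response) {
    phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response, function () {
        response.end(); // end only once phpServer reports it has finished
    });
}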
