jest + enzyme + react16: <img> src : request not sent - javascript

I'm using jest + enzyme to test my react component "AnimateImage" which contains an image element:
import * as React from 'react';
import { PureComponent } from 'react';

interface Props {
  src: string;
}

class AnimateImage extends PureComponent<Props> {
  onImgLoad = (e: Event | { target: HTMLImageElement }) => {
    console.log("yes!");
  };

  render() {
    return (
      <div className="app-image-container">
        <img
          ref={c => {
            if (!c) {
              return;
            }
            c.onerror = function (e) {
              console.log("error:", e);
            };
            if (!c.onload) {
              c.onload = this.onImgLoad;
              if (c && c.complete && c.naturalWidth !== 0) {
                this.onImgLoad({
                  target: c
                });
              }
            }
          }}
          src={this.props.src}
        />
      </div>
    );
  }
}

export default AnimateImage;
test code:
test("image ", () => {
const component = mount(<AnimateImage src={url_test}/>);
expect(component).toMatchSnapshot();
console.log("end ##################################################################");
})
The expected result:
the image's onload handler is called and I can see "yes!" printed in the console.
The actual result:
the image's onload handler is not called and the image's complete attribute stays false.
my jest configuration:
verbose: true,
transform: {
'.(ts|tsx)': 'ts-jest'
},
snapshotSerializers: ['enzyme-to-json/serializer'],
moduleFileExtensions: ['ts', 'tsx', 'js', 'json'],
testEnvironment: "jest-environment-jsdom-fourteen",
testEnvironmentOptions: { "resources": 'usable' },
Debug steps:
I've confirmed that canvas is installed successfully and works well in jsdom.
jsdom's resource loader uses the "request-promise-native" package to fetch HTTP resources, and the core of "request-promise-native" is the "request" package.
In the "request" package, the request.js file declares a class called Request to handle HTTP requests.
But I found that Request.start() is never called, and the defer callback runs with the request's status already set to "abort".
To trace this, I added two console.log() calls in the function where the simulated "window" and "document" are closed, and a console.log('abort') in the place where the aborted request is handled.
The output shows that the jsdom window is closed before the real HTTP request ever goes out, and the request's status is then set to "abort".
bogon: yarn test:dom
yarn run v1.10.1
$ jest --config jest.config.js
PASS animate-image.spec.tsx
✓ image (75ms)
console.log xxxxxxxxx/animate-image.spec.tsx:34
end ##################################################################
window close
document close
http://XXXXX.cdn.com
abort
Here is some code from request.js that may help to understand the problem:
var defer = typeof setImmediate === 'undefined'
? process.nextTick
: setImmediate
defer(function () {
if (self._aborted) {
return
}
var end = function () {
if (self._form) {
if (!self._auth.hasAuth) {
self._form.pipe(self)
} else if (self._auth.hasAuth && self._auth.sentAuth) {
self._form.pipe(self)
}
}
if (self._multipart && self._multipart.chunked) {
self._multipart.body.pipe(self)
}
if (self.body) {
if (isstream(self.body)) {
self.body.pipe(self)
} else {
setContentLength()
if (Array.isArray(self.body)) {
self.body.forEach(function (part) {
self.write(part)
})
} else {
self.write(self.body)
}
self.end()
}
} else if (self.requestBodyStream) {
console.warn('options.requestBodyStream is deprecated, please pass the request object to stream.pipe.')
self.requestBodyStream.pipe(self)
} else if (!self.src) {
if (self._auth.hasAuth && !self._auth.sentAuth) {
self.end()
return
}
if (self.method !== 'GET' && typeof self.method !== 'undefined') {
self.setHeader('content-length', 0)
}
self.end()
}
}
if (self._form && !self.hasHeader('content-length')) {
// Before ending the request, we had to compute the length of the whole form, asyncly
self.setHeader(self._form.getHeaders(), true)
self._form.getLength(function (err, length) {
if (!err && !isNaN(length)) {
self.setHeader('content-length', length)
}
end()
})
} else {
end()
}
self.ntick = true
})
Request.prototype.start = function () {
// start() is called once we are ready to send the outgoing HTTP request.
// this is usually called on the first write(), end() or on nextTick()
var self = this
if (self.timing) {
// All timings will be relative to this request's startTime. In order to do this,
// we need to capture the wall-clock start time (via Date), immediately followed
// by the high-resolution timer (via now()). While these two won't be set
// at the _exact_ same time, they should be close enough to be able to calculate
// high-resolution, monotonically non-decreasing timestamps relative to startTime.
var startTime = new Date().getTime()
var startTimeNow = now()
}
if (self._aborted) {
return
}
self._started = true
self.method = self.method || 'GET'
self.href = self.uri.href
if (self.src && self.src.stat && self.src.stat.size && !self.hasHeader('content-length')) {
self.setHeader('content-length', self.src.stat.size)
}
if (self._aws) {
self.aws(self._aws, true)
}
// We have a method named auth, which is completely different from the http.request
// auth option. If we don't remove it, we're gonna have a bad time.
var reqOptions = copy(self)
delete reqOptions.auth
debug('make request', self.uri.href)
// node v6.8.0 now supports a `timeout` value in `http.request()`, but we
// should delete it for now since we handle timeouts manually for better
// consistency with node versions before v6.8.0
delete reqOptions.timeout
try {
self.req = self.httpModule.request(reqOptions)
} catch (err) {
self.emit('error', err)
return
}
if (self.timing) {
self.startTime = startTime
self.startTimeNow = startTimeNow
// Timing values will all be relative to startTime (by comparing to startTimeNow
// so we have an accurate clock)
self.timings = {}
}
var timeout
if (self.timeout && !self.timeoutTimer) {
if (self.timeout < 0) {
timeout = 0
} else if (typeof self.timeout === 'number' && isFinite(self.timeout)) {
timeout = self.timeout
}
}
self.req.on('response', self.onRequestResponse.bind(self))
self.req.on('error', self.onRequestError.bind(self))
self.req.on('drain', function () {
self.emit('drain')
})
self.req.on('socket', function (socket) {
// `._connecting` was the old property which was made public in node v6.1.0
var isConnecting = socket._connecting || socket.connecting
if (self.timing) {
self.timings.socket = now() - self.startTimeNow
if (isConnecting) {
var onLookupTiming = function () {
self.timings.lookup = now() - self.startTimeNow
}
var onConnectTiming = function () {
self.timings.connect = now() - self.startTimeNow
}
socket.once('lookup', onLookupTiming)
socket.once('connect', onConnectTiming)
// clean up timing event listeners if needed on error
self.req.once('error', function () {
socket.removeListener('lookup', onLookupTiming)
socket.removeListener('connect', onConnectTiming)
})
}
}
var setReqTimeout = function () {
// This timeout sets the amount of time to wait *between* bytes sent
// from the server once connected.
//
// In particular, it's useful for erroring if the server fails to send
// data halfway through streaming a response.
self.req.setTimeout(timeout, function () {
if (self.req) {
self.abort()
var e = new Error('ESOCKETTIMEDOUT')
e.code = 'ESOCKETTIMEDOUT'
e.connect = false
self.emit('error', e)
}
})
}
if (timeout !== undefined) {
// Only start the connection timer if we're actually connecting a new
// socket, otherwise if we're already connected (because this is a
// keep-alive connection) do not bother. This is important since we won't
// get a 'connect' event for an already connected socket.
if (isConnecting) {
var onReqSockConnect = function () {
socket.removeListener('connect', onReqSockConnect)
clearTimeout(self.timeoutTimer)
self.timeoutTimer = null
setReqTimeout()
}
socket.on('connect', onReqSockConnect)
self.req.on('error', function (err) { // eslint-disable-line handle-callback-err
socket.removeListener('connect', onReqSockConnect)
})
// Set a timeout in memory - this block will throw if the server takes more
// than `timeout` to write the HTTP status and headers (corresponding to
// the on('response') event on the client). NB: this measures wall-clock
// time, not the time between bytes sent by the server.
self.timeoutTimer = setTimeout(function () {
socket.removeListener('connect', onReqSockConnect)
self.abort()
var e = new Error('ETIMEDOUT')
e.code = 'ETIMEDOUT'
e.connect = true
self.emit('error', e)
}, timeout)
} else {
// We're already connected
setReqTimeout()
}
}
self.emit('socket', socket)
})
self.emit('request', self.req)
}
I can't get the HTTP request sent to fetch the image source, so the img.onload handler is never called.
Could anyone help me explain this problem?

In the end I didn't find a way to send the request successfully to load the image.
My solution is to mock properties on HTMLImageElement's prototype in my test code:
Object.defineProperty(HTMLImageElement.prototype, 'naturalWidth', { get: () => 120 });
Object.defineProperty(HTMLImageElement.prototype, 'complete', { get: () => true });
This way I don't need to fetch the real image any more, and the test case still finishes successfully.
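For reference, a minimal sketch of how that workaround can sit in a test file (the two property overrides are the ones above; the import path, the test URL, and the console.log spy are illustrative additions, not from the original post):
import { mount } from 'enzyme';
import AnimateImage from './animate-image'; // hypothetical path to the component above

// Pretend every <img> has already finished loading before any test runs.
Object.defineProperty(HTMLImageElement.prototype, 'naturalWidth', { get: () => 120 });
Object.defineProperty(HTMLImageElement.prototype, 'complete', { get: () => true });

test('image calls onImgLoad without a real network request', () => {
  const logSpy = jest.spyOn(console, 'log');
  const component = mount(<AnimateImage src="http://example.com/fake.png" />);

  // The ref callback sees complete === true and naturalWidth !== 0,
  // so it calls onImgLoad({ target: img }) synchronously during mount.
  expect(logSpy).toHaveBeenCalledWith('yes!');
  expect(component).toMatchSnapshot();

  logSpy.mockRestore();
});
Because the getters are defined on the prototype, every image element jsdom creates in this test file reports itself as loaded.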

Related

Console log message fires despite the condition statement - Vue2

The purpose of my code is to fire fetchNumbers() (which fetches numbers from an API) when a user scrolls to the bottom of the page, as a kind of infinite scroll. I'm having an issue with the condition inside the axios promise (then), because it fires two console.log outputs at the same time. It seems like the condition is ignored entirely.
The method I'm using:
methods: {
  fetchNumbers (type = 'default', offset = 0, limit = 0) {
    return axios.get(globalConfig.NUMBERS_URL)
      .then((resp) => {
        if (type === 'infinite') {
          console.log('infinite fired')
        } else {
          console.log('default fired')
        }
      })
  }
}
The mounted function (where I suspect the issue):
mounted () {
window.onscroll = () => {
let bottomOfWindow = document.documentElement.scrollTop + window.innerHeight > document.documentElement.offsetHeight - 1
if (bottomOfWindow) {
this.fetchNumbers('infinite')
}
}
}
When I reload the page, or enter it, I'm getting 2 console outputs at the same time:
default fired
infinite fired
Sometimes the order is reversed.
UPDATE.
Methods that call fetchNumbers():
async created () {
await this.fetchNumbers()
}
showCats (bool) {
this.showCategories = bool
if (!bool) {
this.category = [1]
} else {
this.category = []
}
this.isActive = true
this.fetchNumbers()
}
UPDATE 2.
Found the culprit - would post it in an answer.
UPDATE 3.
The issue is indeed with the onscroll function. I have 3 pages in my app: a main page, a numbers page, and a contact page. If I go to the numbers page (where the onscroll handler is set up in mounted), then go to the main or contact page, the onscroll handler is still attached, and when I reach the bottom it fires my API call even though it's not the numbers page. Is it possible to limit this function to the numbers page only?
I had to disable the onscroll listener on destroy:
destroyed () {
window.onscroll = null
}
So when I visit the main or contact page, that listener won't be attached.
I also had to move the onscroll listener from mounted to created, because when it was in mounted() it was firing twice:
created () {
window.onscroll = () => {
let bottomOfWindow = document.documentElement.scrollTop + window.innerHeight > document.documentElement.offsetHeight - 1
if (bottomOfWindow) {
this.isActive = true
this.fetchNumbers('infinite', counter++ * this.limit)
}
}
}
fetchNumbers is most definitely being called twice.
Try logging every caller's context as type:
fetchNumbers (type = 'default', offset = 0, limit = 0) {
return axios.get(globalConfig.NUMBERS_URL)
.then((resp) => {
if (type === 'infinite') {
console.log(type, 'infinite fired'); // this will always print 'infinite', 'infinite fired'
} else {
console.log(type,'default fired'); //this will print the caller's name and 'default fired'
type = 'default'; //after logging reset type to 'default' to continue the execution
}
})
}
async created () {
await this.fetchNumbers('created');
}
showCats (bool) {
this.showCategories = bool
if (!bool) {
this.category = [1];
} else {
this.category = [];
}
this.isActive = true
this.fetchNumbers('showCats');
}
Either created or showCats is being fired for some reason at the same time as window.onscroll. The randomly changing order suggests a race condition between the two methods.
UPDATE
This should work, provided you nest the desired page in a div with id="numbers_page_div_id" (you will probably have to adjust that element's height and position):
created () {
  let element = document.getElementById('numbers_page_div_id');
  element.onscroll = () => {
    let bottomOfWindow = element.scrollTop + window.innerHeight > element.offsetHeight - 1;
    if (bottomOfWindow) {
      this.fetchNumbers('infinite')
    }
  }
}
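A related pattern (a sketch, not from the answers above; the method name onScroll is illustrative and fetchNumbers is the method from the question): register the scroll handler with addEventListener in the component that needs it and remove it in destroyed, so window.onscroll is never overwritten globally and other pages are unaffected.
export default {
  methods: {
    onScroll () {
      const bottomOfWindow =
        document.documentElement.scrollTop + window.innerHeight >
        document.documentElement.offsetHeight - 1
      if (bottomOfWindow) {
        this.fetchNumbers('infinite')
      }
    }
  },
  created () {
    // Vue binds methods to the instance, so the same reference
    // can be removed again in destroyed().
    window.addEventListener('scroll', this.onScroll)
  },
  destroyed () {
    window.removeEventListener('scroll', this.onScroll)
  }
}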
You could also use separate conditions instead of if/else, e.g.:
.then((resp) => {
if (type === 'infinite') {
console.log('infinite fired')
}
if (type === 'default') {
console.log('default fired')
}
})

React-Native — Global variable not changing

I have two separate files, the first being a component (List.js) that uses the second (APIService.js) to fetch different APIs. To fetch correctly, the URL needs to receive global variables. Right now, I am trying to redefine these variables from a function in the APIService file, without success. The variables are supposed to be redefined in APIService.js just before the "API calls" comment.
I have two questions:
Why is the global variable naptanId not being redefined?
Would it be possible to define and pass these variables from the component?
Pseudo-code
Detects beacon
Redefine naptanId
Component fetch API using recently defined variable
API call is done
Data is passed back to Component
Set states
List.js
componentDidMount() {
// Executes first function
APIService._fetchStopPoint((resp1) => {
console.log("Stoppoint", resp1)
// ... and set the bus state with the first response
this.setState({
bus: resp1
});
// ... based on the response, convert array to string
const lines = (resp1.lines.map((line) => line.name)).toString()
// ... pass lines to second function
APIService._fetchArrivalTimes(lines, (resp2) => {
// .. and set the tube state with the second response
this.setState({
isLoading: false,
tube: resp2
});
});
});
}
APIService.js
// Variables
// ***********************************************************************
let naptanId = undefined
let lines = undefined
let ice = '59333'
let mint = '57011'
let blueberry = '27686'
let nearestBeacon = undefined;
let newBeaconId = undefined;
let setIce = false;
let setBlueberry = false;
let setMint = false;
// Beacon detection
// ***********************************************************************
const region = {
identifier: 'Estimotes',
uuid: '354A97D8-9CAF-0DC7-CE0E-02352EBE90CD',
};
// Request for authorization while the app is open
Beacons.requestWhenInUseAuthorization();
Beacons.startMonitoringForRegion(region);
Beacons.startRangingBeaconsInRegion(region);
Beacons.startUpdatingLocation();
// Listen for beacon changes
const subscription = DeviceEventEmitter.addListener('beaconsDidRange', (data) => {
const ibeacons = data.beacons
// var lowestAccuracySeen = 0.5;
let lowestAccuracySeen = "immediate"
// Check if beacons are updating
if (ibeacons && ibeacons.length > 0) {
// Loop through beacons array
for (var i = 0; i < ibeacons.length ; i++) {
// Find beacons with same minor ...
var foundBeacon = ibeacons.find(function(closestBeacon) {
// ... and return the beacon the lowest accuracy seen
// return closestBeacon.accuracy.toFixed(2) < lowestAccuracySeen;
return closestBeacon.proximity == lowestAccuracySeen
});
// If found ...
if (foundBeacon) {
// ... define the lowest accuracy and the nearest beacon
lowestAccuracySeen = foundBeacon.accuracy;
nearestBeacon = foundBeacon;
// Identify what component to render against nearest beacon
setIce = nearestBeacon.minor == ice ? true : false;
setMint = nearestBeacon.minor == mint ? true : false;
setBlueberry = nearestBeacon.minor == blueberry ? true : false;
if (setIce) {
// THESE VARIABLES CANNOT BE REDEFINED
naptanId = "490004936E"
lines = "55"
} else if (setMint) {
} else if (setBlueberry) {
};
}
}
}
});
// API calls
// ***********************************************************************
class APIService {
// Fetch stop point info
static _fetchStopPoint(cb) {
console.log(naptanId, lines)
fetch(`https://api.tfl.gov.uk/StopPoint/${naptanId}`)
.then(stopData => {
try {
stopData = JSON.parse(stopData._bodyText); // Converts data to a readable format
cb(stopData, naptanId);
} catch(e) {
cb(e);
}
})
.catch(e => cb(e));
}
// Fetch arrival times info
static _fetchArrivalTimes(lines, cb) {
fetch(`https://api.tfl.gov.uk/Line/${lines}/Arrivals/${naptanId}`)
.then(arrivalData => {
try {
arrivalData = JSON.parse(arrivalData._bodyText);
arrivalTime = arrivalData
cb(arrivalData);
} catch(e) {
cb(e);
}
})
.catch(e => cb(e));
}
// Fetch status info
static _fetchStatus(lines) {
fetch(`https://api-argon.digital.tfl.gov.uk/Line/${lines}/Status`)
.then(statusData => {
try {
statusData = JSON.parse(statusData._bodyText); // Converts data to a readable format
cb(statusData);
} catch(e) {
cb(e);
}
})
.catch(e => cb(e));
}
}
module.exports = APIService;
The simplest approach to handle these global variables (across different components) is to use AsyncStorage:
let response = await AsyncStorage.getItem('listOfTasks'); //get, in any components
AsyncStorage.setItem('listOfTasks', 'I like to save it.'); //set, in any components
For more performance-critical global vars, you can also consider a Realm database (like CoreData or SQLite, available on both iOS and Android).
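Applied to the question's naptanId, that suggestion could look roughly like the sketch below; the storage key names and the two helper functions are illustrative, not from the answer:
import { AsyncStorage } from 'react-native';

// Beacon-listener side: persist the value instead of assigning a module-level variable.
async function rememberStop(naptanId, lines) {
  await AsyncStorage.setItem('naptanId', naptanId);
  await AsyncStorage.setItem('lines', lines);
}

// API side: read the value back right before building the URL.
async function fetchStopPoint() {
  const naptanId = await AsyncStorage.getItem('naptanId');
  const response = await fetch(`https://api.tfl.gov.uk/StopPoint/${naptanId}`);
  return response.json();
}
The other option raised in the question, passing naptanId into the APIService methods as an argument from the component, would avoid the shared state altogether.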

Angular - Scanner.js - Uncaught TypeError: Cannot read property 'processScannedImage' of undefined

We are working on an Angular application that integrates with a TWAIN scanner, using scanner.js.
We can successfully get the image from the scanner, but after we process the scanned image we get the following error:
Cannot read property 'processScannedImage' of undefined
We believe the error is coming from a scoping issue, but we do not know what it is exactly. Here is the code we used, as in the scanner.js docs, but converted into TS:
import '../../../assets/scripts/scanner.js';
import { TypeFormatterService } from 'app/main/services/type-formatter.service';
declare let scanner;
/** Processes a ScannedImage */
private processScannedImage(scannedImage): any {
console.log(scannedImage)
scannedImage = this.typeFormatter.caseRequestImageFormat(scannedImage);
console.log(scannedImage)
this._scannedImages.push(scannedImage);
const elementImg = scanner.createDomElementFromModel({
'name': 'img',
'attributes': {
'class': 'scanned',
'src': scannedImage.src
}
});
document.getElementById('images').appendChild(elementImg);
console.log(this._scannedImages);
}
/** Processes the scan result */
private displayImagesOnPage(successful, mesg, response): any {
if (!successful) { // On error
console.error('Failed: ' + mesg);
return;
}
// User canceled.
if (successful && mesg !== null && mesg.toLowerCase().indexOf('user cancel') >= 0) {
console.log('User canceled');
return;
}
// returns an array of ScannedImage
// function getScannedImages(response, includeOriginals, includeThumbnails)
const that = this;
const scannedImages = scanner.getScannedImages(response, true, false);
for (let i = 0; (scannedImages instanceof Array) && i < scannedImages.length; i++) {
const scannedImage = scannedImages[i];
that.processScannedImage(scannedImage);
}
}
public scanToJpg() {
const that = this;
scanner.scan(that.displayImagesOnPage,
{
'show_scanner_ui': false, // Whether scanner UI should be shown
'output_settings':
[
{
'type': 'return-base64',
'format': 'jpg'
}
]
}
);
}
Yes, it is an issue with the scope. When the scanner.js plugin calls the displayImagesOnPage function from the line scanner.scan(that.displayImagesOnPage, it loses the scope of your current class. In order to pass the scope along with the function, you can bind the scope to the function.
This is how I got it to work:
scanner.scan(this.displayImagesOnPage.bind(this)
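In context, the full call could look something like the sketch below; only the .bind(this) part comes from the answer above, the rest restates the options object from the question:
scanToJpg() {
  // .bind(this) keeps the component instance as `this`
  // when scanner.js invokes the callback.
  scanner.scan(this.displayImagesOnPage.bind(this),
    {
      'show_scanner_ui': false,
      'output_settings':
      [
        {
          'type': 'return-base64',
          'format': 'jpg'
        }
      ]
    }
  );
}
An arrow-function wrapper, e.g. scanner.scan((successful, mesg, response) => this.displayImagesOnPage(successful, mesg, response), ...), achieves the same thing, because arrow functions capture this lexically.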

Node/CommonJS: can a "private" variable leak between requests?

In an imaginary Session module like the one below, could the _sessData variable leak between requests? For instance, maybe a user just logged in, and at the "same time" an isAuthed() call is made for a different user. Could this be a problem? This module would be called on every request, so I guess it's safe, but a confirmation would be great.
module.exports = function(app) {
var _sessData = null;
function Session() {
//
}
Session.prototype.set = function( payload ) {
Cookies.set('session', payload);
_sessData = payload;
}
Session.prototype.isAuthed = function() {
return _sessData && Object.keys(_sessData).length > 0;
}
Session.prototype.clear = function() {
Cookies.set('session', '');
_sessData = {};
}
Object.defineProperty(app.context, 'Session', {
// Not exactly sure what is happening here with this and _ctx..
// Note: apparently ctx is bound to the middleware when call()ing
get: function() { return new Session(this); }
});
return function * (next) {
var token = Cookies.get('jwt');
if ( ! token ) {
_sessData = {};
return yield* next;
}
try {
_sessData = jwt.verify(token, SECRET);
} catch(e) {
if (e.name === 'TokenExpiredError') {
this.Session.clear();
}
}
yield* next;
}
}
EDIT:
The module gets used in a Koa app like so (the above module does not produce a proper Koa middleware, but this is beside the point):
var app = require('koa')();
// JWT session middleware
var session = require("./session")();
app.use(session);
app.listen(3080);
What you are exporting is a function, so _sessData does not actually exist when you import the module. It gets created when you call the function. Each time the function is called -- and it needs to be called once per request -- a new variable in that scope with the name _sessData is created. No, they cannot interfere with each other.
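A quick way to see the scoping this answer describes (a standalone sketch, not taken from the question's code): every call of the exported factory creates a fresh _sessData binding, and only callers sharing one returned instance share that binding.
// Minimal closure demo, independent of Koa and JWTs.
function makeSession() {
  let _sessData = null;
  return {
    set(payload) { _sessData = payload; },
    isAuthed() { return _sessData && Object.keys(_sessData).length > 0; }
  };
}

const a = makeSession();
const b = makeSession();
a.set({ user: 'alice' });

console.log(a.isAuthed()); // true
console.log(b.isAuthed()); // null (falsy): b has its own _sessData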

Reading a file in real-time using Node.js

I need to work out the best way to read data that is being written to a file, using node.js, in real time. Trouble is, Node is a fast moving ship which makes finding the best method for addressing a problem difficult.
What I Want To Do
I have a java process that is doing something and then writing the results of this thing it does to a text file. It typically takes anything from 5 mins to 5 hours to run, with data being written the whole time, and can get up to some fairly hefty throughput rates (circa. 1000 lines/sec).
I would like to read this file, in real time, and then, using node aggregate the data and write it to a socket where it can be graphed on the client.
The client, graphs, sockets and aggregation logic are all done but I am confused about the best approach for reading the file.
What I Have Tried (or at least played with)
FIFO - I can tell my Java process to write to a FIFO and read this using node; this is in fact how we currently have this implemented using Perl, but because everything else is running in node it makes sense to port the code over.
Unix Sockets - As above.
fs.watchFile - will this work for what we need?
fs.createReadStream - is this better than watchFile?
fs & tail -f - seems like a hack.
What, actually, is my Question
I am tending towards using Unix Sockets, this seems the fastest option. But does node have better built-in features for reading files from the fs in real time?
If you want to keep the file as a persistent store of your data, to prevent a loss of the stream in case of a system crash or if one of the members in your network of running processes dies, you can still continue writing to a file and reading from it.
If you do not need this file as persistent storage of the results produced by your Java process, then going with a Unix socket is much better for both ease and performance.
fs.watchFile() is not what you need, because it works on file stats as the filesystem reports them, and since you want to read the file as it is being written, this is not what you want.
SHORT UPDATE: I am very sorry to realize that although I accused fs.watchFile() of using file stats in the previous paragraph, I have done the very same thing myself in my example code below! Although I already warned readers to "take care!" because I wrote it in just a few minutes without even testing it well, it can still be done better by using fs.watch() instead of watchFile or fstatSync, if the underlying system supports it.
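For illustration, a rough sketch of that fs.watch() idea (not part of the original answer; the file name and byte accounting are placeholders): react to change events and read whatever has been appended since the last read.
var fs = require('fs');

var filename = 'test-read-write.txt';
var readbytes = 0;

fs.watch(filename, function (eventType) {
  if (eventType !== 'change') return;
  var size = fs.statSync(filename).size;
  if (size <= readbytes) return; // nothing new yet

  // Stream only the newly appended byte range.
  var stream = fs.createReadStream(filename, { start: readbytes, end: size - 1 });
  stream.on('data', function (chunk) {
    readbytes += chunk.length;
    process.stdout.write(chunk); // process the new data here
  });
});
On some platforms fs.watch fires several change events for a single write, so a real implementation would need to guard against overlapping reads.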
For reading from and writing to a file, I have just written the code below for fun during my break:
test-fs-writer.js: [You will not need this, since you write the file in your Java process]
var fs = require('fs'),
lineno=0;
var stream = fs.createWriteStream('test-read-write.txt', {flags:'a'});
stream.on('open', function() {
console.log('Stream opened, will start writing in 2 secs');
setInterval(function() { stream.write((++lineno)+' oi!\n'); }, 2000);
});
test-fs-reader.js: [Take care, this is just a demonstration; check the err objects!]
var fs = require('fs'),
bite_size = 256,
readbytes = 0,
file;
fs.open('test-read-write.txt', 'r', function(err, fd) { file = fd; readsome(); });
function readsome() {
var stats = fs.fstatSync(file); // yes sometimes async does not make sense!
if(stats.size<readbytes+1) {
console.log('Hehe I am much faster than your writer..! I will sleep for a while, I deserve it!');
setTimeout(readsome, 3000);
}
else {
fs.read(file, new Buffer(bite_size), 0, bite_size, readbytes, processsome);
}
}
function processsome(err, bytecount, buff) {
console.log('Read', bytecount, 'and will process it now.');
// Here we will process our incoming data:
// Do whatever you need. Just be careful about not using beyond the bytecount in buff.
console.log(buff.toString('utf-8', 0, bytecount));
// So we continue reading from where we left:
readbytes+=bytecount;
process.nextTick(readsome);
}
You can safely avoid using nextTick and call readsome() directly instead. Since we are still working sync here, it is not necessary in any sense. I just like it. :p
EDIT by Oliver Lloyd
Taking the example above but extending it to read CSV data gives:
var lastLineFeed,
    lineArray,
    valueArray = [];

function processsome(err, bytecount, buff) {
  lastLineFeed = buff.toString('utf-8', 0, bytecount).lastIndexOf('\n');
  if (lastLineFeed > -1) {
    // Split the buffer by line
    lineArray = buff.toString('utf-8', 0, bytecount).slice(0, lastLineFeed).split('\n');
    // Then split each line by comma
    for (var i = 0; i < lineArray.length; i++) {
      // Add read rows to an array for use elsewhere
      valueArray.push(lineArray[i].split(','));
    }
    // Set a new position to read from
    readbytes += lastLineFeed + 1;
  } else {
    // No complete lines were read
    readbytes += bytecount;
  }
  process.nextTick(readsome);
}
Why do you think tail -f is a hack?
While figuring this out I found a good example; I would do something similar.
Real time online activity monitor example with node.js and WebSocket:
http://blog.new-bamboo.co.uk/2009/12/7/real-time-online-activity-monitor-example-with-node-js-and-websocket
Just to make this answer complete, I wrote you some example code which runs under 0.8.0 (the http server is maybe a hack).
A child process is spawned running tail, and since a child process is an EventEmitter with three streams (we use stdout in our case), you can just add a listener with on.
filename: tailServer.js
usage: node tailServer /var/log/filename.log
var http = require("http");
var filename = process.argv[2];
if (!filename)
return console.log("Usage: node tailServer filename");
var spawn = require('child_process').spawn;
var tail = spawn('tail', ['-f', filename]);
http.createServer(function (request, response) {
console.log('request starting...');
response.writeHead(200, {'Content-Type': 'text/plain' });
tail.stdout.on('data', function (data) {
response.write('' + data);
});
}).listen(8088);
console.log('Server running at http://127.0.0.1:8088/');
This module is an implementation of the principle @hasanyasin suggests:
https://github.com/felixge/node-growing-file
I took the answer from @hasanyasin and wrapped it up into a modular promise. The basic idea is that you pass a file and a handler function that does something with the stringified buffer that is read from the file. If the handler function returns true, then the file will stop being read. You can also set a timeout that will kill reading if the handler doesn't return true fast enough.
The promiser will return true if the resolve() was called due to timeout, otherwise it will return false.
See the bottom for usage example.
// https://stackoverflow.com/a/11233045
var fs = require('fs');
var Promise = require('promise');
class liveReaderPromiseMe {
constructor(file, buffStringHandler, opts) {
/*
var opts = {
starting_position: 0,
byte_size: 256,
check_for_bytes_every_ms: 3000,
no_handler_resolution_timeout_ms: null
};
*/
if (file == null) {
throw new Error("file arg must be present");
} else {
this.file = file;
}
if (buffStringHandler == null) {
throw new Error("buffStringHandler arg must be present");
} else {
this.buffStringHandler = buffStringHandler;
}
if (opts == null) {
opts = {};
}
if (opts.starting_position == null) {
this.current_position = 0;
} else {
this.current_position = opts.starting_position;
}
if (opts.byte_size == null) {
this.byte_size = 256;
} else {
this.byte_size = opts.byte_size;
}
if (opts.check_for_bytes_every_ms == null) {
this.check_for_bytes_every_ms = 3000;
} else {
this.check_for_bytes_every_ms = opts.check_for_bytes_every_ms;
}
if (opts.no_handler_resolution_timeout_ms == null) {
this.no_handler_resolution_timeout_ms = null;
} else {
this.no_handler_resolution_timeout_ms = opts.no_handler_resolution_timeout_ms;
}
}
startHandlerTimeout() {
if (this.no_handler_resolution_timeout_ms && (this._handlerTimer == null)) {
var that = this;
this._handlerTimer = setTimeout(
function() {
that._is_handler_timed_out = true;
},
this.no_handler_resolution_timeout_ms
);
}
}
clearHandlerTimeout() {
if (this._handlerTimer != null) {
clearTimeout(this._handlerTimer);
this._handlerTimer = null;
}
this._is_handler_timed_out = false;
}
isHandlerTimedOut() {
return !!this._is_handler_timed_out;
}
fsReadCallback(err, bytecount, buff) {
try {
if (err) {
throw err;
} else {
this.current_position += bytecount;
var buff_str = buff.toString('utf-8', 0, bytecount);
var that = this;
Promise.resolve().then(function() {
return that.buffStringHandler(buff_str);
}).then(function(is_handler_resolved) {
if (is_handler_resolved) {
that.resolve(false);
} else {
process.nextTick(that.doReading.bind(that));
}
}).catch(function(err) {
that.reject(err);
});
}
} catch(err) {
this.reject(err);
}
}
fsRead(bytecount) {
fs.read(
this.file,
new Buffer(bytecount),
0,
bytecount,
this.current_position,
this.fsReadCallback.bind(this)
);
}
doReading() {
if (this.isHandlerTimedOut()) {
return this.resolve(true);
}
var max_next_bytes = fs.fstatSync(this.file).size - this.current_position;
if (max_next_bytes) {
this.fsRead( (this.byte_size > max_next_bytes) ? max_next_bytes : this.byte_size );
} else {
setTimeout(this.doReading.bind(this), this.check_for_bytes_every_ms);
}
}
promiser() {
var that = this;
return new Promise(function(resolve, reject) {
that.resolve = resolve;
that.reject = reject;
that.doReading();
that.startHandlerTimeout();
}).then(function(was_resolved_by_timeout) {
that.clearHandlerTimeout();
return was_resolved_by_timeout;
});
}
}
module.exports = function(file, buffStringHandler, opts) {
try {
var live_reader = new liveReaderPromiseMe(file, buffStringHandler, opts);
return live_reader.promiser();
} catch(err) {
return Promise.reject(err);
}
};
Then use the above code like this:
var fs = require('fs');
var path = require('path');
var Promise = require('promise');
var liveReadAppendingFilePromiser = require('./path/to/liveReadAppendingFilePromiser');
var ending_str = '_THIS_IS_THE_END_';
var test_path = path.join('E:/tmp/test.txt');
var s_list = [];
var buffStringHandler = function(s) {
s_list.push(s);
var tmp = s_list.join('');
if (-1 !== tmp.indexOf(ending_str)) {
// if this return never occurs, then the file will be read until no_handler_resolution_timeout_ms
// by default, no_handler_resolution_timeout_ms is null, so read will continue forever until this function returns something that evaluates to true
return true;
// you can also return a promise:
// return Promise.resolve().then(function() { return true; } );
}
};
var appender = fs.openSync(test_path, 'a');
try {
var reader = fs.openSync(test_path, 'r');
try {
var options = {
starting_position: 0,
byte_size: 256,
check_for_bytes_every_ms: 3000,
no_handler_resolution_timeout_ms: 10000,
};
liveReadAppendingFilePromiser(reader, buffStringHandler, options)
.then(function(did_reader_time_out) {
console.log('reader timed out: ', did_reader_time_out);
console.log(s_list.join(''));
}).catch(function(err) {
console.error('bad stuff: ', err);
}).then(function() {
fs.closeSync(appender);
fs.closeSync(reader);
});
fs.write(appender, '\ncheck it out, I am a string');
fs.write(appender, '\nwho killed kenny');
//fs.write(appender, ending_str);
} catch(err) {
fs.closeSync(reader);
console.log('err1');
throw err;
}
} catch(err) {
fs.closeSync(appender);
console.log('err2');
throw err;
}
