I have a search feature implemented on top of fs: when I get an input string from the client, I split each file's data into lines and match line by line. But as you can see in server.log, some events span multiple lines, so pulling data line by line misses part of them. For example, the first event is two lines, so my search would return only
[2017-03-22T20:25:04Z]|zldv6658|info|bmid: n/a|infra.actorRouter|Adding event to queue: { queue: 'd-email',
and miss the second line of that event. How can I get the complete data, maybe based on the time variable?
searchService.js
async.eachSeries(filesData.logFiles, function(logfile, done) {
    // read the file line by line
    console.log('SearchEnv in eachSeries', filesData.searchEnv);
    var prevLine;
    fs.createReadStream('./logs/' + filesData.searchEnv + '/' + logfile.filename)
        .pipe(split())
        .on('data', function(line) {
            if (line.toLowerCase().indexOf(searchStr.toLowerCase()) != -1) parseLog(line, prevLine);
            else prevLine = line;
        })
        .on('end', done) // signal eachSeries only once the whole file has been read
        .on('error', done);
function parseLog(line, prev) {
// Very rudimentary check...
if (line.indexOf('|') === -1) line = prev + line;
// Parse as you were doing
var messageDateInfo = line.split('|')[0].replace(/[\[\]']+/g, '');
console.log('1st message date is', messageDateInfo)
messageDateInfo = new Date(messageDateInfo).getTime();
searchStartDate = new Date(searchStartDate).getTime();
searchEndDate = new Date(searchEndDate).getTime();
console.log('message date is', messageDateInfo)
console.log('start date is ', messageDateInfo - searchStartDate);
console.log('end date is ', searchEndDate - messageDateInfo);
if (messageDateInfo - searchStartDate > 0 && searchEndDate - messageDateInfo > 0) {
// console.log("message date is within this time range");
results.push({
filename: logfile.filename,
value: line
});
}
}
}, function(err) {
if (err) {
console.log('error', err);
}
results.forEach(function(result) {
    console.log(result);
});
// send back results
callback(results);
results = [];
logFiles = null;
});
}
server.log
[2017-03-22T20:25:04Z]|zldv6658|info|bmid: n/a|infra.actorRouter|Adding event to queue: { queue: 'd-email',
msgId: '7eec01e9-6395-4fee-b44f-f09a40e56978' }
[2017-03-22T20:25:04Z]|zldv6658|info|bmid: n/a|infra.templateActor|Filter match for actor/rule (d-email/email_service) with msgId: 7eec01e9-6395-4fee-b44f-f09a40e56978
[2017-03-22T20:25:04Z]|zldv6658|info|bmid: 7eec01e9-6395-4fee-b44f-f09a40e56978|mailDispatcher|Received mail event. msgId=7eec01e9-6395-4fee-b44f-f09a40e56978
[2017-03-22T20:25:04Z]|zldv6658|info|bmid: n/a|mailDispatcher|Mail event with msgId 7eec01e9-6395-4fee-b44f-f09a40e56978 successful: 3 messages delivered
[2017-03-22T20:25:05Z]|zldv6658|verbose|bmid: n/a|routes.event|Received Event from IP (::ffff:130.9.137.139): 74609753-143b-4e06-845c-9a5721575c19
{"event":{"header":{"eventSource":"AOTSvTM","timestamp":1481966987000,"eventType":"http://aotsvtm.eventing.att.com/SendEscalationsEvent/V1","entityId":"ENTITYID_1"}
You can use the split module (similarly to how I demonstrated in my other answer to your very similar question) together with the fs module.
fs.createReadStream(file)
.pipe(split())
.on('data', function (line) {
//each chunk now is a separate line!
});
See the docs: https://www.npmjs.com/package/split
If your log actually has multiline events, you could just keep the previous line(s) in memory while parsing. Also, don't just load the whole thing at once in memory. Use streams to reduce the strain on your machine.
let prevLine;
fs.createReadStream(file)
.pipe(split())
.on('data', function (line) {
if (line.toLowerCase().indexOf(searchStr.toLowerCase()) != -1) parseLog(line, prevLine);
else prevLine = line;
});
function parseLog(line, prev) {
// Very rudimentary check...
if (line.indexOf('|') === -1) line = prev + line;
// Parse as you were doing
}
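If an event can span more than two lines, a variation on the same idea is to buffer lines until the next line that opens a new event (assumed here to be a line starting with a [ timestamp, as in your server.log), and run the search against the whole buffered event. A minimal, untested sketch; handleEvent is a hypothetical helper and searchStr comes from the surrounding scope:
let eventLines = [];
fs.createReadStream(file)
    .pipe(split())
    .on('data', function (line) {
        // A '[' at column 0 starts a new event, so the buffered one is complete.
        if (line.charAt(0) === '[' && eventLines.length) {
            handleEvent(eventLines.join('\n'));
            eventLines = [];
        }
        eventLines.push(line);
    })
    .on('end', function () {
        // Flush the final buffered event.
        if (eventLines.length) handleEvent(eventLines.join('\n'));
    });
function handleEvent(event) {
    if (event.toLowerCase().indexOf(searchStr.toLowerCase()) !== -1) {
        // parse/collect the complete, possibly multi-line event here
    }
}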
As a rule of thumb for the future, log files are much easier to manage when built with single-line JSON.
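For illustration, a minimal sketch of what that buys you: with one JSON object per line, every 'data' event from split is one complete log event. The path and field names here are made up:
fs.createReadStream('./logs/app.log')
    .pipe(split())
    .on('data', function (line) {
        if (!line) return; // skip the trailing empty line
        // e.g. {"time":"2017-03-22T20:25:04Z","level":"info","msg":"Adding event to queue"}
        var entry = JSON.parse(line);
        if ((entry.msg || '').toLowerCase().indexOf(searchStr.toLowerCase()) !== -1) {
            results.push(entry);
        }
    });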
Related
I have a problem: my Arduino code lets me turn an LED on with values received via Serial.read(), but when I send them from Node.js with port.write() the LED does not turn on, no matter what I have tried.
Arduino code
int pin = 12;
String vali="OFF";
char val;
void setup() {
pinMode(pin,OUTPUT);
Serial.begin(9600);
}
void loop() {
val = Serial.read();
if(val == '1'){
digitalWrite(pin,HIGH);
vali="ON";
}
else if(val == '0'){
digitalWrite(pin,LOW);
vali="OFF";
}
delay(1000);
Serial.println(" estado:"+ vali );
}
Node.js code
const Serialport = require('serialport');
const readline = Serialport.parsers.Readline;
var val = 0;
const port = new Serialport('COM3', {
baudRate: 9600
});
const parse = port.pipe(new readline({ delimiter: '\r\n' }));
port.on('open', function() {
console.log('conect ');
});
port.write("1\r\n");
parse.write("1\r\n");
parse.on("data", (data) => {
console.log(data);
});
It is stopping because of the delay(1000); it is a blocking delay, so Serial.read() can't reliably pick up the incoming data. Try removing it and restructuring along these lines:
if (Serial.available() > 0)
{
    // Then read the data and turn the LED on or off, with any delay inside here.
    // Also, for 1, 2 or any other numeric input you can use the Serial.parseInt()
    // function to read it.
}
In my add-in I am making an HTTP request and receiving an output. I want to place that output into a binding and have it expand the binding if necessary because the user won't necessarily know how many rows x columns the output will be. How would I go about doing this? Currently I am binding to a range, but if that range does not match the size of the [[]] that I am providing, then the data is not displayed in the sheet. So, this ends up requiring the user to know the size of the output.
What I'm doing currently using Angular is as follows (the problem with this being that the output isn't always the same size as the Office.BindingType.Matrix that the user selected in the spreadsheet):
I create the binding to where the output should be placed as follows:
inputBindFromPrompt(parameterId: number): Promise<IOfficeResult> {
let bindType: Office.BindingType;
if(this.inputBindings[parameterId].type != 'data.frame' && this.inputBindings[parameterId].type != 'vector') {
bindType = Office.BindingType.Text;
} else {
bindType = Office.BindingType.Matrix;
}
return new Promise((resolve, reject) => {
this.workbook.bindings.addFromPromptAsync(bindType, { id: this.inputBindings[parameterId].name },
(addBindingResult: Office.AsyncResult) => {
if(addBindingResult.status === Office.AsyncResultStatus.Failed) {
reject({
error: 'Unable to bind to workbook. Error: ' + addBindingResult.error.message
});
} else {
this.inputBindings[parameterId].binding = addBindingResult.value;
resolve({
success: 'Created binding ' + addBindingResult.value.type + ' on ' + addBindingResult.value.id
});
}
})
})
}
Then, when the user submits via a button, the inputs are passed to an HTTP request service, which receives an output that I process into an array of arrays so that it can go into an Office.BindingType.Matrix:
this.isBusy = true;
this.feedback = 'submitted';
// Grab the values from the form
// Send as a POST and receive an output
// Put the output in the Excel sheet
this.webServicesService.postWebServices(this.service, this.inputParameters)
.subscribe(
(data: any) => {
// Correctly received data
// Access the data by name while looping through output parameters
this.error = false;
this.feedback = 'received data';
let i = 0;
this.outputParameters.forEach(element => {
// temporary name to identify the parameter
let name = element.name;
// Set the data value in the parameter
if(element.type == 'data.frame') {
let parameter = data[name];
this.feedback = parameter;
let excelData = [];
for(var key in parameter) {
if(parameter.hasOwnProperty(key)) {
var val = parameter[key];
excelData.push(val);
}
}
element.value = excelData;
}
else {
element.value = data[name];
}
// Set value in the form
let param = (<FormArray>this.serviceForm.controls['outputParameters']).at(i);
param.patchValue({
value: element.value
});
// Set value in the spreadsheet
this.excelService.outputSetText(i, element.value)
.then((result: IOfficeResult) => {
this.onResult(result);
i++;
});
}, (result: IOfficeResult) => {
this.onResult(result);
});
},
(error) => {
if(error.status == 400 || error.status == 401) {
// Return user to authentication page
this.authService.logout();
this.router.navigate(['/']);
} else {
// Tell user to try again
this.error = true;
}
}
);
The line above that sets the value on the Office.BindingType.Matrix binding is this.excelService.outputSetText(i, element.value), which calls this method in the Excel service:
outputSetText(parameterId: number, data: any): Promise<IOfficeResult> {
return new Promise((resolve, reject) => {
if(this.outputBindings[parameterId].binding) {
this.outputBindings[parameterId].binding.setDataAsync(data, function (result: Office.AsyncResult) {
if(result.status == Office.AsyncResultStatus.Failed) {
reject({ error: 'Failed to set value. Error: ' + result.error.message });
} else {
resolve({
success: 'successfully set value'
});
}
})
} else {
reject({
error: 'binding has not been created. bindFromPrompt must be called'
});
}
})
}
It's essentially using addFromPromptAsync() to set an output spot for the HTTP request. Then the user submits which sends the request, receives the data back and processes it into an array of arrays [[]] so that it can be the correct data format for Office.BindingType.Matrix. However, unless this is the same number of rows and columns as the binding originally selected, it won't display in the sheet. So, is there a binding type that will dynamically grow based on the data I give it? Or would I just need to release the current binding and make a new binding according to the size of the HTTP response data?
So long as you're using the "shared" (Office 2013) APIs, you will have this issue.
However, in the host-specific (2016+) APIs, you can easily solve the problem by resizing the range to suit your needs. Or more precisely, getting the binding, then asking for its range, then getting just the first (top-left) cell, and then resizing it:
await Excel.run(async (context) => {
let values = [
["", "Price"],
["Apple", 0.99],
["Orange", 1.59],
];
let firstCell = context.workbook.bindings.getItem("TestBinding").getRange().getCell(0, 0);
let fullRange = firstCell.getResizedRange(
values.length - 1, values[0].length - 1);
fullRange.values = values;
await context.sync();
});
You can try this snippet live in literally five clicks in the new Script Lab (https://aka.ms/getscriptlab). Simply install the Script Lab add-in (free), then choose "Import" in the navigation menu, and use the following GIST URL: https://gist.github.com/Zlatkovsky/5a2fc743bc9c8556d3eb3234e287d7f3. See more info about importing snippets to Script Lab.
I'm trying to set up a game that allows playing with random players. The code below is supposed to create a GameMessage object for each of the two paired players. To relate both objects as part of the same game, I've decided to save the objectId of the game object made for "firstplayer" in the field "otherside" of "secondplayer", and vice versa. For some reason it doesn't work (perhaps the first save of firstplayer and secondplayer isn't finished before the code attempts to retrieve the objectIds, meaning there are no objectIds to get yet?).
Short version: Why are the "otherside" values not saving?
Parse.Cloud.define("findpartner", function(request, response) {
var User = Parse.Object.extend("_User");
var user = new User();
var currentuser = Parse.User.current();
currentuser.set("searching", 0);
var query = new Parse.Query(User);
query.equalTo("searching", 1);
query.limit(50); //limit to at most 50 users
query.find({
success: function(objects) {
var amount = objects.length;
var indexNum = Math.floor((Math.random() * amount));
var newpartner = objects[indexNum];
if (amount > 0 && newpartner.id !=currentuser.id) {
newpartner.set("searching", 0);
var Firstplayer = Parse.Object.extend("GameMessages");
var firstplayer = new Firstplayer();
var Secondplayer = Parse.Object.extend("GameMessages");
var secondplayer = new Secondplayer();
firstplayer.set("sender", currentuser.id);
firstplayer.set("receiver", newpartner.id);
firstplayer.set("sent",0);
firstplayer.set("received",0);
firstplayer.set("receiverName", newpartner.getUsername());
secondplayer.set("sender", newpartner.id);
secondplayer.set("receiver", currentuser.id);
secondplayer.set("sent",0);
secondplayer.set("received",0);
secondplayer.set("receiverName", currentuser.getUsername());
firstplayer.save().then(function(secondplayer){ <<<
return secondplayer.save(); <<<
}).then(function(firstplayer_update) { <<<
return firstplayer.save({ otherside: secondplayer.id}); <<<
}).then(function(secondplayer_update){ <<<
return secondplayer.save({ otherside: firstplayer.id}); <<<
});
newpartner.save(null, {useMasterKey: true});
}
else {
currentuser.set("searching", 1);
}
currentuser.save();
response.success(amount);
},
error: function(error) {
alert("Error: " + error.code = " " + error.message);
}
});
});
I added arrows to show where the "otherside" is. They're not in the actual code. I do not doubt the code has mistakes though, I do not know javascript. I wrote it solely by studying the parse.com documentation.
I'm not convinced that it makes sense to create these 2 independent messages and link them together, but I won't let that stand in the way of getting this working. This isn't tested, but I've refactored your code and think you should try to glean a few things from it.
// Set this up once, outside of your function, and use it everywhere
var GameMessage = Parse.Object.extend("GameMessages");
Parse.Cloud.define("findpartner", function(request, response) {
// Code defensively, make sure this function requires a user be logged in.
if (!request.user) {
console.log("non-user called findpartner");
return response.error("Unauthorized.");
}
// Get the user who called the function
var user = request.user;
// The end response is a number, apparently
var result = 0;
// The target player
var targetPlayer;
// The two messages that will be used if a match is found
var firstmsg = new GameMessage();
var secondmsg = new GameMessage();
// Create a Users query
var query = new Parse.Query(Parse.User);
query.equalTo("searching", 1);
query.notEqualTo("objectId", user.id);
query.limit(50);
// Remove public access to Find operations for Users in the Data Browser
// Use the master key to query, and use promise syntax.
query.find({ useMasterKey: true }).then(function(objects) {
result = objects.length;
// If no users were found searching, mark the user as searching and save
if (result == 0) {
user.set('searching', 1);
// Return the save promise
return user.save(null, { useMasterKey: true });
}
// Pick a random user out of the response
var indexNum = Math.floor((Math.random() * objects.length));
targetPlayer = objects[indexNum];
// Set that user to no longer be searching and save
targetPlayer.set("searching", 0);
return targetPlayer.save(null, { useMasterKey: true }).then(function() {
firstmsg.set("sender", user.id);
firstmsg.set("receiver", targetPlayer.id);
firstmsg.set("sent", 0);
firstmsg.set("received", 0);
firstmsg.set("receiverName", targetPlayer.getUsername());
secondmsg.set("sender", targetPlayer.id);
secondmsg.set("receiver", user.id);
secondmsg.set("sent", 0);
secondmsg.set("received", 0);
secondmsg.set("receiverName", user.getUsername());
// Return the promise result of saving both messages
return Parse.Object.saveAll([firstmsg, secondmsg], { useMasterKey: true });
}).then(function(messages) {
// Set the pointers to reference each other
firstmsg.set("otherside", secondmsg.id);
secondmsg.set("otherside", firstmsg.id);
// Return the promise result of saving both messages, again
return Parse.Object.saveAll([firstmsg, secondmsg], { useMasterKey: true });
});
}).then(function() {
// All the stuff above has finished one way or the other, now we just need to
// send back the result. 0 if no match was made.
response.success(result);
}, function(error) {
response.error(error);
});
});
firstplayer.save();
secondplayer.save();
secondplayer.set("otherside",firstplayer.id); <<<
firstplayer.set("otherside",secondplayer.id); <<<
firstplayer.save();
secondplayer.save();
This is the part of the code that you say is not working. In the Parse docs you can see that .save() is a non-blocking operation. That means the line firstplayer.save() moves straight on to the next line (it won't block the thread while saving). So when you set the id with secondplayer.set("otherside", firstplayer.id), firstplayer.id is still undefined.
So if you want synchronous logic (save first_object, then save second_object),
you have to use callbacks.
first_object.save(null, {
    success: function(saved_first_object) {
        second_object.save(null, {
            success: function(saved_second_object) {
                // process complete
            },
            error: function(saved_second_object, error) {
                console.log(error);
            }
        });
    },
    error: function(saved_first_object, error) {
        console.log(error);
    }
});
You can also approach it using promises.
http://blog.parse.com/2013/01/29/whats-so-great-about-javascript-promises/
UPDATE: Based on the question edit from the OP trying promises, try this:
firstplayer.save()
.then(function(saved_firstPlayer){
firstplayer = saved_firstPlayer;
return secondplayer.save();
}).then(function(saved_secondplayer) {
secondplayer = saved_secondplayer;
return firstplayer.save({ otherside: secondplayer.id});
}).then(function(updated_firstplayer){
firstplayer = updated_firstplayer;
return secondplayer.save({ otherside: firstplayer.id});
}).then(function(updated_secondplayer){
    secondplayer = updated_secondplayer;
});
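As a side note, since each .save() is a round trip to Parse, the four sequential saves can be cut to two by batching with Parse.Object.saveAll, the same approach used in the refactored answer above. A rough sketch:
// First batch save assigns ids to both objects...
Parse.Object.saveAll([firstplayer, secondplayer]).then(function() {
    // ...so the cross-references can now be set and saved in a second batch.
    firstplayer.set("otherside", secondplayer.id);
    secondplayer.set("otherside", firstplayer.id);
    return Parse.Object.saveAll([firstplayer, secondplayer]);
});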
What's the best way to overwrite a line in a large (2MB+) text file using node.js?
My current method involves (sketched below):
Copying the entire file into a buffer.
Splitting the buffer into an array by the newline character (\n).
Overwriting the line by using the buffer index.
Then overwriting the file with the buffer after joining with \n.
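For reference, a minimal sketch of that current approach (the function and arguments are hypothetical); it works, but the entire 2MB+ file passes through memory on every edit:
var fs = require('fs');
// Read the whole file, swap one line by index, write everything back.
function overwriteLine(file, index, newText) {
    var lines = fs.readFileSync(file, 'utf8').split('\n');
    lines[index] = newText;
    fs.writeFileSync(file, lines.join('\n'));
}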
First, you need to search where the line starts and where it ends. Next you need to use a function for replacing the line. I have the solution for the first part using one of my libraries: Node-BufferedReader.
var lineToReplace = "your_line_to_replace";
var startLineOffset = 0;
var endLineOffset = 0;
new BufferedReader ("your_file", { encoding: "utf8" })
.on ("error", function (error){
console.log (error);
})
.on ("line", function (line, byteOffset){
startLineOffset = endLineOffset;
endLineOffset = byteOffset - 1; //byteOffset is the offset of the NEXT byte. -1 if it's the end of the file, if that's the case, endLineOffset = <the file size>
if (line === lineToReplace ){
console.log ("start: " + startLineOffset + ", end: " + endLineOffset +
", length: " + (endLineOffset - startLineOffset));
this.interrupt (); //interrupts the reading and finishes
}
})
.read ();
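Once you have the offsets, and assuming the replacement is exactly the same byte length as the original line, you could overwrite it in place; a sketch (not part of Node-BufferedReader). With a different length you would instead have to rewrite everything from the line onwards:
var fs = require('fs');
// Overwrite the old line's bytes in place; the rest of the file is untouched.
// Only safe when the replacement has the same byte length as the original line.
function overwriteInPlace(file, startLineOffset, replacement) {
    var fd = fs.openSync(file, 'r+');
    fs.writeSync(fd, new Buffer(replacement), 0, Buffer.byteLength(replacement), startLineOffset);
    fs.closeSync(fd);
}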
Maybe you can try the package replace-in-file
suppose we have a txt file as below
// file.txt
"line1"
"line2"
"line5"
"line6"
"line1"
"line2"
"line5"
"line6"
and we want to replace:
line1 -> line3
line2 -> line4
Then, we can do it like this:
const replace = require('replace-in-file');
const options = {
files: "./file.txt",
from: [/line1/g, /line2/g],
to: ["line3", "line4"]
};
replace(options)
.then(result => {
console.log("Replacement results: ",result);
})
.catch(error => {
console.log(error);
});
The result is as below:
// file.txt
"line3"
"line4"
"line5"
"line6"
"line3"
"line4"
"line5"
"line6"
For more details, please refer to its docs: https://www.npmjs.com/package/replace-in-file
This isn't a file-size-focused solution, but it overwrites a line in a file using node.js. It may help other people whom search engines redirect to this post, like me.
import * as fs from 'fs'
const filename = process.argv[2]
const lineIndexToUpdate = parseInt(process.argv[3]) - 1
const textUpdate = process.argv[4]
function filterLine(indexToUpdate, dataString) {
return dataString
.split('\n')
.map((val, index) => {
if (index === indexToUpdate)
return textUpdate
else
return val
})
.join('\n')
}
fs.readFile(filename, 'utf8', (err, data) => {
if (err) throw err
fs.writeFile(filename, filterLine(lineIndexToUpdate, data), (err) => {
    if (err) throw err
    console.log("Line updated")
})
})
Script usage example:
node update_line.js file 10 "te voglio benne"
I need to work out the best way to read data that is being written to a file, using node.js, in real time. Trouble is, Node is a fast moving ship which makes finding the best method for addressing a problem difficult.
What I Want To Do
I have a java process that is doing something and then writing the results of this thing it does to a text file. It typically takes anything from 5 mins to 5 hours to run, with data being written the whole time, and can get up to some fairly hefty throughput rates (circa. 1000 lines/sec).
I would like to read this file in real time, and then, using node, aggregate the data and write it to a socket where it can be graphed on the client.
The client, graphs, sockets and aggregation logic are all done but I am confused about the best approach for reading the file.
What I Have Tried (or at least played with)
FIFO - I can tell my Java process to write to a fifo and read this using node; this is in fact how we currently have it implemented using Perl, but because everything else is running in node it makes sense to port the code over.
Unix Sockets - As above.
fs.watchFile - will this work for what we need?
fs.createReadStream - is this better than watchFile?
fs & tail -f - seems like a hack.
What, actually, is my Question
I am tending towards using Unix sockets, as this seems the fastest option. But does node have better built-in features for reading files from the fs in real time?
If you want to keep the file as a persistent store of your data, to prevent loss of the stream in case of a system crash or one of the running processes in your network dying, you can still continue writing to the file and reading from it.
If you do not need this file as persistent storage of the results produced by your Java process, then going with a Unix socket is much better, both for ease and for performance.
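For the Unix-socket route, a minimal sketch of the node side (the socket path is made up; your Java process would connect to it and write there instead of to the file):
var net = require('net');
// Data arrives as the writer produces it; no polling involved.
var server = net.createServer(function (connection) {
    connection.setEncoding('utf8');
    connection.on('data', function (chunk) {
        // aggregate the incoming lines here and push them on to your graphing socket
    });
});
server.listen('/tmp/results.sock');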
fs.watchFile() is not what you need, because it works on file stats as the filesystem reports them, and since you want to read the file as it is being written, this is not what you want.
SHORT UPDATE: I am very sorry to realize that although I accused fs.watchFile() of using file stats in the previous paragraph, I had done the very same thing myself in my example code below! Although I had already warned readers to "take care!" because I wrote it in just a few minutes without even testing well, it can still be done better by using fs.watch() instead of watchFile or fstatSync, if the underlying system supports it.
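For completeness, a minimal sketch of that fs.watch() variant; it reuses the readsome() function from the reader below, and how reliable the change events are depends on the platform:
var fs = require('fs');
// React to change events instead of polling with fstatSync.
fs.watch('test-read-write.txt', function (eventType) {
    if (eventType === 'change') readsome();
});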
For reading/writing from a file, I have just written the below for fun during my break:
test-fs-writer.js: [You will not need this since you write the file in your Java process]
var fs = require('fs'),
lineno=0;
var stream = fs.createWriteStream('test-read-write.txt', {flags:'a'});
stream.on('open', function() {
console.log('Stream opened, will start writing in 2 secs');
setInterval(function() { stream.write((++lineno)+' oi!\n'); }, 2000);
});
test-fs-reader.js: [Take care, this is just a demonstration; check the err objects!]
var fs = require('fs'),
bite_size = 256,
readbytes = 0,
file;
fs.open('test-read-write.txt', 'r', function(err, fd) { file = fd; readsome(); });
function readsome() {
var stats = fs.fstatSync(file); // yes sometimes async does not make sense!
if(stats.size<readbytes+1) {
console.log('Hehe I am much faster than your writer..! I will sleep for a while, I deserve it!');
setTimeout(readsome, 3000);
}
else {
fs.read(file, new Buffer(bite_size), 0, bite_size, readbytes, processsome);
}
}
function processsome(err, bytecount, buff) {
console.log('Read', bytecount, 'and will process it now.');
// Here we will process our incoming data:
// Do whatever you need. Just be careful about not using beyond the bytecount in buff.
console.log(buff.toString('utf-8', 0, bytecount));
// So we continue reading from where we left:
readbytes+=bytecount;
process.nextTick(readsome);
}
You can safely avoid using nextTick and call readsome() directly instead. Since we are still working sync here, it is not necessary in any sense. I just like it. :p
EDIT by Oliver Lloyd
Taking the example above but extending it to read CSV data gives:
var lastLineFeed,
    lineArray,
    valueArray = [];
function processsome(err, bytecount, buff) {
lastLineFeed = buff.toString('utf-8', 0, bytecount).lastIndexOf('\n');
if(lastLineFeed > -1){
// Split the buffer by line
lineArray = buff.toString('utf-8', 0, bytecount).slice(0,lastLineFeed).split('\n');
// Then split each line by comma
for(var i=0;i<lineArray.length;i++){
// Add read rows to an array for use elsewhere
valueArray.push(lineArray[i].split(','));
}
// Set a new position to read from
readbytes+=lastLineFeed+1;
} else {
// No complete lines were read
readbytes+=bytecount;
}
process.nextTick(readsome);
}
Why do you think tail -f is a hack?
While figuring this out I found a good example; I would do something similar.
Real time online activity monitor example with node.js and WebSocket:
http://blog.new-bamboo.co.uk/2009/12/7/real-time-online-activity-monitor-example-with-node-js-and-websocket
Just to make this answer complete, I wrote you some example code that runs under 0.8.0 (the http server is maybe a bit of a hack).
A child process is spawned running tail, and since a child process is an EventEmitter with three streams (we use stdout in our case), you can just add a listener with on:
filename: tailServer.js
usage: node tailServer /var/log/filename.log
var http = require("http");
var filename = process.argv[2];
if (!filename)
return console.log("Usage: node tailServer filename");
var spawn = require('child_process').spawn;
var tail = spawn('tail', ['-f', filename]);
http.createServer(function (request, response) {
console.log('request starting...');
response.writeHead(200, {'Content-Type': 'text/plain' });
tail.stdout.on('data', function (data) {
response.write('' + data);
});
}).listen(8088);
console.log('Server running at http://127.0.0.1:8088/');
This module is an implementation of the principle #hasanyasin suggests:
https://github.com/felixge/node-growing-file
I took the answer from #hasanyasin and wrapped it up into a modular promise. The basic idea is that you pass a file and a handler function that does something with the stringified-buffer that is read from the file. If the handler function returns true, then the file will stop being read. You can also set a timeout that will kill reading if the handler doesn't return true fast enough.
The promiser will return true if the resolve() was called due to timeout, otherwise it will return false.
See the bottom for a usage example.
// https://stackoverflow.com/a/11233045
var fs = require('fs');
var Promise = require('promise');
class liveReaderPromiseMe {
constructor(file, buffStringHandler, opts) {
/*
var opts = {
starting_position: 0,
byte_size: 256,
check_for_bytes_every_ms: 3000,
no_handler_resolution_timeout_ms: null
};
*/
if (file == null) {
throw new Error("file arg must be present");
} else {
this.file = file;
}
if (buffStringHandler == null) {
throw new Error("buffStringHandler arg must be present");
} else {
this.buffStringHandler = buffStringHandler;
}
if (opts == null) {
opts = {};
}
if (opts.starting_position == null) {
this.current_position = 0;
} else {
this.current_position = opts.starting_position;
}
if (opts.byte_size == null) {
this.byte_size = 256;
} else {
this.byte_size = opts.byte_size;
}
if (opts.check_for_bytes_every_ms == null) {
this.check_for_bytes_every_ms = 3000;
} else {
this.check_for_bytes_every_ms = opts.check_for_bytes_every_ms;
}
if (opts.no_handler_resolution_timeout_ms == null) {
this.no_handler_resolution_timeout_ms = null;
} else {
this.no_handler_resolution_timeout_ms = opts.no_handler_resolution_timeout_ms;
}
}
startHandlerTimeout() {
if (this.no_handler_resolution_timeout_ms && (this._handlerTimer == null)) {
var that = this;
this._handlerTimer = setTimeout(
function() {
that._is_handler_timed_out = true;
},
this.no_handler_resolution_timeout_ms
);
}
}
clearHandlerTimeout() {
if (this._handlerTimer != null) {
clearTimeout(this._handlerTimer);
this._handlerTimer = null;
}
this._is_handler_timed_out = false;
}
isHandlerTimedOut() {
return !!this._is_handler_timed_out;
}
fsReadCallback(err, bytecount, buff) {
try {
if (err) {
throw err;
} else {
this.current_position += bytecount;
var buff_str = buff.toString('utf-8', 0, bytecount);
var that = this;
Promise.resolve().then(function() {
return that.buffStringHandler(buff_str);
}).then(function(is_handler_resolved) {
if (is_handler_resolved) {
that.resolve(false);
} else {
process.nextTick(that.doReading.bind(that));
}
}).catch(function(err) {
that.reject(err);
});
}
} catch(err) {
this.reject(err);
}
}
fsRead(bytecount) {
fs.read(
this.file,
new Buffer(bytecount),
0,
bytecount,
this.current_position,
this.fsReadCallback.bind(this)
);
}
doReading() {
if (this.isHandlerTimedOut()) {
return this.resolve(true);
}
var max_next_bytes = fs.fstatSync(this.file).size - this.current_position;
if (max_next_bytes) {
this.fsRead( (this.byte_size > max_next_bytes) ? max_next_bytes : this.byte_size );
} else {
setTimeout(this.doReading.bind(this), this.check_for_bytes_every_ms);
}
}
promiser() {
var that = this;
return new Promise(function(resolve, reject) {
that.resolve = resolve;
that.reject = reject;
that.doReading();
that.startHandlerTimeout();
}).then(function(was_resolved_by_timeout) {
that.clearHandlerTimeout();
return was_resolved_by_timeout;
});
}
}
module.exports = function(file, buffStringHandler, opts) {
try {
var live_reader = new liveReaderPromiseMe(file, buffStringHandler, opts);
return live_reader.promiser();
} catch(err) {
return Promise.reject(err);
}
};
Then use the above code like this:
var fs = require('fs');
var path = require('path');
var Promise = require('promise');
var liveReadAppendingFilePromiser = require('./path/to/liveReadAppendingFilePromiser');
var ending_str = '_THIS_IS_THE_END_';
var test_path = path.join('E:/tmp/test.txt');
var s_list = [];
var buffStringHandler = function(s) {
s_list.push(s);
var tmp = s_list.join('');
if (-1 !== tmp.indexOf(ending_str)) {
// if this return never occurs, then the file will be read until no_handler_resolution_timeout_ms
// by default, no_handler_resolution_timeout_ms is null, so read will continue forever until this function returns something that evaluates to true
return true;
// you can also return a promise:
// return Promise.resolve().then(function() { return true; } );
}
};
var appender = fs.openSync(test_path, 'a');
try {
var reader = fs.openSync(test_path, 'r');
try {
var options = {
starting_position: 0,
byte_size: 256,
check_for_bytes_every_ms: 3000,
no_handler_resolution_timeout_ms: 10000,
};
liveReadAppendingFilePromiser(reader, buffStringHandler, options)
.then(function(did_reader_time_out) {
console.log('reader timed out: ', did_reader_time_out);
console.log(s_list.join(''));
}).catch(function(err) {
console.error('bad stuff: ', err);
}).then(function() {
fs.closeSync(appender);
fs.closeSync(reader);
});
fs.write(appender, '\ncheck it out, I am a string');
fs.write(appender, '\nwho killed kenny');
//fs.write(appender, ending_str);
} catch(err) {
fs.closeSync(reader);
console.log('err1');
throw err;
}
} catch(err) {
fs.closeSync(appender);
console.log('err2');
throw err;
}