Adding [0] causes callback to be called twice? - javascript

I'm writing a utility for some Minecraft stuff. First of all, I have code that can extract specified files from an archive and pass their content to a callback:
const unzip = require("unzip-stream");
const Volume = require("memfs").Volume;
const mfs = new Volume();
const fs = require("fs");
function getFile(archive, path, cb) {
let called = false;
fs.createReadStream(archive)
.pipe(unzip.Parse())
.on("entry", function(entity) {
if (path.includes(entity.path)) {
entity.pipe(mfs.createWriteStream("/" + path))
.on("close", function() {
mfs.readFile("/" + path, function(err, content) {
if (!called) cb(content);
called = true;
mfs.reset();
});
}).on("err", () => {});
} else {
entity.autodrain();
}
});
}
module.exports = { getFile };
It works perfectly when I test it in the interactive console:
require("./zip").getFile("minecraft-mod.jar", ["mcmod.info", "cccmod.info"], console.log); // <= Works fine! Calls callback ONCE!
When I started to develop the utility using this code I discovered a VERY strange thing.
I have filenames in a files array.
I'm using async.eachSeries to iterate over it, with only an iterator function and no final callback.
I have this code to parse .json files in mods:
let modinfo = Object.create(JSON.parse(content.replace(/(\r\n|\n|\r)/gm,"")));
It also works fine. But here comes magic...
So, the .json files can contain an array or an object. If it's an array, we need to take its first element:
if (modinfo[0]) modinfo = modinfo[0];
It works.
But if it's an object, we need to take the first element of its modlist property:
else modinfo = modinfo.modlist[0];
And if modinfo was an object - boom - the callback now fires TWICE! WHAT?
But if I remove [0] from the else branch:
else modinfo = modinfo.modlist; // <= No [0]
The callback will be called ONCE! ???
If I try to do something like this:
if (modinfo[0]) modinfo = modinfo[0];
else {
const x = modinfo.modlist;
modinfo = x[0];
}
Same thing happens...
Also, it's called without arguments.
I tried to investigate where the callback is called twice. I read the zip extractor code again... It has these lines:
This:
let called = false;
And those:
if (!called) cb(content);
called = true;
So, even if for some reason this condition fires two times:
if (path.includes(entity.path)) {
it should not call the callback twice, right? Wrong! Not only that, but if I try to
console.log(called);
It will log false two times!
NodeJS version: v8.0.0
Full code:
function startSignCheck() {
clear();
const files = fs.readdirSync("../mods");
async.eachSeries(files, function(file, cb) {
console.log("[>]", file);
zip.getFile("../mods/" + file, ["mcmod.info", "cccmod.info"], function(content) {
console.log(content);
console.log(Buffer.isBuffer(content));
if (content != undefined) content = content.toString();
if (!content) return cb();
let modinfo = Object.create(JSON.parse(content.replace(/(\r\n|\n|\r)/gm, "")));
if (modinfo[0]) modinfo = modinfo[0];
else modinfo = modinfo.modlist[0];
//if (!modinfo.name) return cb();
/*curse.searchMod(modinfo.name, modinfo.version, curse.versions[modinfo.mcversion], function(link) {
if (!link) return cb();
signature.generateMD5("../mods/" + file, function(localSignature) {
signature.URLgenerateMD5(link, function(curseSignature) {
if (localSignature === curseSignature) {
console.log(file, "- Подпись верна".green);
} else {
console.log(file.bgWhite.red + " - Подпись неверна".bgWhite.red); // "Signature is invalid"
}
cb();
});
});
});*/
});
});
}
Example contents of mcmod.info is:
{
"modListVersion": 2,
"modList": [{
"modid": "journeymap",
"name": "JourneyMap",
"description": "JourneyMap Unlimited Edition: Real-time map in-game or in a web browser as you explore.",
"version": "1.7.10-5.1.4p2",
"mcversion": "1.7.10",
"url": "http://journeymap.info",
"updateUrl": "",
"authorList": ["techbrew", "mysticdrew"],
"logoFile": "assets/journeymap/web/img/ico/journeymap144.png",
"screenshots": [],
"dependants":[],
"dependencies": ["Forge#[10.13.4.1558,)"],
"requiredMods": ["Forge#[10.13.4.1558,)"],
"useDependencyInformation": true
}]
}

The problem was that I was using modlist instead of modList. Now it works! Thanks for the solution, Barmar.
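For reference, the working branch (matching the modList key in the mcmod.info sample above) is simply:
if (modinfo[0]) modinfo = modinfo[0];
else modinfo = modinfo.modList[0]; // modList, not modlist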

Related

JS - How to retrieve variable after IndexedDB transaction.oncomplete() executes?

My problem is simple, but incredibly frustrating as I'm now on my second week of trying to figure this out and on the verge of giving up. I would like to retrieve my 'notesObject' variable outside my getAllNotes() function after the transaction.oncomplete() listener executes.
(function() {
// check for IndexedDB support
if (!window.indexedDB) {
console.log(`Your browser doesn't support IndexedDB`);
return;
}
// open the CRM database with the version 1
let request = indexedDB.open('Notes', 1);
// create the Contacts object store and indexes
request.onupgradeneeded = (event) => {
let db = event.target.result;
// create the Notes object store ('table')
let store = db.createObjectStore('Notes', {
autoIncrement: true
});
// create an index on the sections property.
let index = store.createIndex('Sections', 'sections', {
unique: true
});
}
function insertData() {
let myDB = indexedDB.open('Notes');
myDB.onsuccess = (event) => {
// myDB.transaction('Notes', 'readwrite')
event.target.result.transaction('Notes', 'readwrite')
.objectStore('Notes')
.put({
sections: "New Note",
pages: "New page",
lastSelectedPage: ""
});
console.log("insert successful");
}
myDB.onerror = (event) => {
console.log('Error in NotesDB - insertData(): ' + event.target.errorCode);
}
myDB.oncomplete = (event) => {
myDB.close();
console.log('closed');
}
}
insertData()
function getAllNotes() {
let myDB = indexedDB.open('Notes');
let notesObject = [];
myDB.onsuccess = (event) => {
let dbObjectStore = event.target.result
.transaction("Notes", "readwrite").objectStore("Notes");
dbObjectStore.openCursor().onsuccess = (e) => {
let cursor = e.target.result;
if (cursor) {
let primaryKey = cursor.key;
let section = cursor.value.sections;
notesObject.push({
primaryKey,
section
})
cursor.continue();
}
}
dbObjectStore.transaction.onerror = (event) => {
console.log('Error in NotesDB - getAllData() transaction: ' + event.target.errorCode);
}
dbObjectStore.transaction.oncomplete = (event) => {
return notesObject;
console.log(notesObject)
}
}
}
let notes = getAllNotes()
console.log("Getting Notes sucessful: " + notes)
})()
I've tried setting global variables, but nothing seems to work. I am a complete noob and honestly, I'm completely lost on how to retrieve the notesObject variable outside my getAllNotes() function. The results I get are 'undefined'. Any help would be greatly appreciated.
This is effectively a duplicate of Indexeddb: return value after openrequest.onsuccess
The operations getAllNotes() kicks off are asynchronous (they will run in the background and take time to complete), whereas your final console.log() call is run synchronously, immediately after getAllNotes(). The operations haven't completed at the time that is run, so there's nothing to log.
If you search SO for "indexeddb asynchronous" you'll find plenty of questions and answers about this topic.
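A minimal sketch of the usual fix (assuming the same 'Notes' store as above): pass a callback into getAllNotes() and invoke it from the transaction's oncomplete handler, instead of trying to return the array.
function getAllNotes(callback) {
  let myDB = indexedDB.open('Notes');
  myDB.onsuccess = (event) => {
    let notesObject = [];
    let store = event.target.result
      .transaction('Notes', 'readonly').objectStore('Notes');
    store.openCursor().onsuccess = (e) => {
      let cursor = e.target.result;
      if (cursor) {
        notesObject.push({ primaryKey: cursor.key, section: cursor.value.sections });
        cursor.continue();
      }
    };
    // Only hand the result out once the transaction has finished.
    store.transaction.oncomplete = () => callback(notesObject);
  };
}
// The notes are only available inside the callback, never as a return value:
getAllNotes((notes) => console.log('Getting Notes successful:', notes));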

Node.js: Exported function "is not a function" in one js file, but not another

So one of my files has a function that I need two of my other files to access. I am exporting this function with module.exports = {}. For some reason, one of my files is able to call this function, while I get a commands.commandRouter is not a function error when trying to call it from the other file. Here's basically what I've got going on:
commands.js
function commandRouter(commandName, commandType) {
if (commandType == 'sound') {
console.log(`${commandName} is a sound command, executing sound function`)
playAudio.playAudio(commandName.substring(1));
}
}
module.exports = {commandRouter}
main.js
const commands = require('./modules/commands.js');
const secondary = require('./modules/secondary.js');
client.on('message', (channel, tags, message, self) => {
if(message.charAt(0) == '!'){
console.log('Trigger character identified');
if(commands.commandList.hasOwnProperty(message.toLowerCase())) {
console.log('Valid command identified')
if (commands.commandList[`${message}`] == 'random' ) {
console.log('Random-type command identified')
secondary.randomSelectPropery(message.toLowerCase().substring(1));
}
else
{
console.log('Regular command identified')
commands.commandRouter(message, commands.commandList[`${message}`]);
}
}
}
}
commands.commandRouter(paramA, paramB); works just fine in this instance
secondary.js
const commands = require('./commands.js');
var randomSelectPropery = function (commandObject) {
randomObject = eval(commandObject);
var keys = Object.keys(randomObject);
console.log(randomObject)
console.log(`This object has ${keys.length} commands to choose from`);
var newCommandName = Object.getOwnPropertyNames(randomObject)[keys.length * Math.random() << 0];
console.log(newCommandName)
var newCommandType = randomObject[`${newCommandName}`]
console.log(newCommandType);
commands.commandRouter(newCommandName, newCommandType);
};
const perfect = {
"!perfectqube": "sound",
"!perfectsf2": "sound",
};
module.exports = { perfect, randomSelectPropery }
Here, commands.commandRouter(paramA, paramB); gives the commands.commandRouter is not a function error.
File structure is as follows:
.\folder\main.js
.\folder\modules\commands.js
.\folder\modules\secondary.js

Waiting for multiple async operations in Nightwatch.js

I am attempting to test multiple sites for section headers being in the correct order. Of course everything is asynchronous in Nightwatch, including getting text from an element. The following code leads to the timeout never being called.
client.url(`www.oneofmyrealestatesites.com`);
client.waitForElementPresent("body", 5000);
var _ = require("underscore");
// This is the order I expect things to be in
var expected = ["Homes For Sale", "New Homes", "Apartments & Rentals"];
client.elements("css selector", ".listings .module-title .label", function (data) {
var listings = [];
data.value.forEach(function (element) {
client.elementIdText(element.ELEMENT, function (result) {
listings.push(result.value);
})
})
setTimeout(function () {
// Some of the sites have extra sections
var diff = _.intersection(listings, expected);
client.assert.ok(listings == diff);
}, 5000);
});
It would appear that no matter how much delay I give, listings is ALWAYS empty. If I console.log listings as it's being pushed to, it is getting populated, so that's not the issue. client.pause is always ignored as well.
Is there a way to make sure that listings is populated before asserting the diff?
I'm using the async library for such cases: https://github.com/caolan/async
Docs: https://github.com/caolan/async/blob/v1.5.2/README.md
var async = require("async");
/*use each, eachSeries or eachLimit - check docs for differences */
async.eachSeries(data.value, function (element, cb) {
client.elementIdText(element.ELEMENT, function (result) {
listings.push(result.value);
// this job is done
cb();
});
}, function() {
// Some of the sites have extra sections
var diff = _.intersection(listings, expected);
client.assert.ok(listings == diff);
});
setTimeout can only be called from .execute or .executeAsync because it's actual JavaScript. The function below only started working once I used .executeAsync.
Hope this works for you.
Cheers, Rody
LoopQuestionsLogSymptom: function() {
this.waitForElementVisible('.next-button', constants.timeout.medium, false);
this.api.executeAsync(function() {
let checkQuestion = function() {
// GET THE ELEMENTS
let nextButton = document.querySelectorAll('.next-button');
let answers = document.getElementsByClassName('flex-row');
let blueButton = document.querySelector('.blue-inverse-button');
let doneButton = document.querySelector('#doneButton');
let monitor = document.querySelector('.monitor');
//CHECK THE TYPES OF QUESTIONS AND CLICK THE RIGHT BUTTONS
if (!blueButton) {
answers[0].click();
nextButton[0].click()
} else if(blueButton){
blueButton.click();
}
setTimeout(() => {
if(!doneButton) {
console.log('Answering another question!');
checkQuestion();
}
if(doneButton){
doneButton.click();
}
else if(monitor) {
console.log("Exiting?")
.assert(monitor);
return this;
}
}, 2000);
};
// Initiating the check question function
return checkQuestion();
},[], function(){
console.log('Done?')
});
this.waitForElementVisible('.monitor', constants.timeout.medium);
this.assert.elementPresent('.monitor');
this.assert.urlEquals('https://member-portal.env-dev4.vivantehealth.org/progress');
return this;
},

Uglify JS - compressing unused variables

Uglify has a "compression" option that can remove unused variables...
However, if I stored some functions in an object like this....
helpers = {
doSomething: function () { ... },
doSomethingElese: function () { ... }
}
... is there a way to remove helpers.doSomething() if it's never accessed?
Guess I want to give the compressor permission to change my object.
Any ideas if it's possible? Or any other tools that can help?
Using a static analyzer like Uglify2 or Esprima to accomplish this task is somewhat nontrivial, because there are lots of ways a function can end up being called that are difficult to determine statically. To show the complexity, there's this website:
http://sevinf.github.io/blog/2012/09/29/esprima-tutorial/
It attempts to at least identify unused functions. However, the code provided on that website will not work against your example because it looks for FunctionDeclarations and not FunctionExpressions. It also only counts CallExpressions whose callees are Identifiers, ignoring CallExpressions whose callees are MemberExpressions, which is what your example uses. There's also a problem of scope: it doesn't take into account functions in different scopes with the same name - perfectly legal JavaScript - so you lose fidelity, and it will miss some unused functions, thinking they were called when they were not.
To handle the scope problem, you might be able to employ ESTR (https://github.com/clausreinke/estr), to help figure out the scope of the variables and from there the unused functions. Then you'll need to use something like escodegen to remove the unused functions.
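For the removal step, a minimal sketch of the escodegen side (assuming escodegen is installed alongside esprima, that the unused names have already been identified, and only handling top-level FunctionDeclarations) could look like this:
var esprima = require('esprima');
var escodegen = require('escodegen');
// Parse the source, drop top-level function declarations whose names were
// flagged as unused, then regenerate the remaining code.
function stripUnusedFunctions(code, unusedNames) {
    var ast = esprima.parse(code);
    ast.body = ast.body.filter(function (node) {
        return !(node.type === 'FunctionDeclaration' &&
                 unusedNames.indexOf(node.id.name) !== -1);
    });
    return escodegen.generate(ast);
}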
As a starting point for you, I've adapted the code from that website to work for the specific situation you provided, but be forewarned: it will have the scope issue described above.
This is written for Node.js, so you'll need to get esprima with npm to use the example as provided, and of course execute it with node.
var fs = require('fs');
var esprima = require('esprima');
if (process.argv.length < 3) {
console.log('Usage: node ' + process.argv[1] + ' <filename>');
process.exit(1);
}
var notifydeadcode = function(data){
function traverse(node, func) {
func(node);
for (var key in node) {
if (node.hasOwnProperty(key)) {
var child = node[key];
if (typeof child === 'object' && child !== null) {
if (Array.isArray(child)) {
child.forEach(function(node) {
traverse(node, func);
});
} else {
traverse(child, func);
}
}
}
}
}
function analyzeCode(code) {
var ast = esprima.parse(code);
var functionsStats = {};
var addStatsEntry = function(funcName) {
if (!functionsStats[funcName]) {
functionsStats[funcName] = {calls: 0, declarations:0};
}
};
var pnode = null;
traverse(ast, function(node) {
if (node.type === 'FunctionExpression') {
if(pnode.type == 'Identifier'){
var expr = pnode.name;
addStatsEntry(expr);
functionsStats[expr].declarations++;
}
} else if (node.type === 'FunctionDeclaration') {
addStatsEntry(node.id.name);
functionsStats[node.id.name].declarations++;
} else if (node.type === 'CallExpression' && node.callee.type === 'Identifier') {
addStatsEntry(node.callee.name);
functionsStats[node.callee.name].calls++;
}else if (node.type === 'CallExpression' && node.callee.type === 'MemberExpression'){
var lexpr = node.callee.property.name;
addStatsEntry(lexpr);
functionsStats[lexpr].calls++;
}
pnode = node;
});
processResults(functionsStats);
}
function processResults(results) {
//console.log(JSON.stringify(results));
for (var name in results) {
if (results.hasOwnProperty(name)) {
var stats = results[name];
if (stats.declarations === 0) {
console.log('Function', name, 'undeclared');
} else if (stats.declarations > 1) {
console.log('Function', name, 'declared multiple times');
} else if (stats.calls === 0) {
console.log('Function', name, 'declared but not called');
}
}
}
}
analyzeCode(data);
}
// Read the file and print its contents.
var filename = process.argv[2];
fs.readFile(filename, 'utf8', function(err, data) {
if (err) throw err;
console.log('OK: ' + filename);
notifydeadcode(data);
});
So if you plop that in a file like deadfunc.js and then call it like so:
node deadfunc.js test.js
where test.js contains:
helpers = {
doSomething:function(){ },
doSomethingElse:function(){ }
};
helpers.doSomethingElse();
You will get the output:
OK: test.js
Function doSomething declared but not called
One last thing to note: attempting to find unused variables and functions might be a rabbit hole because you have situations like eval and Functions created from strings. You also have to think about apply and call etc, etc. Which is why, I assume, we don't have this capability in the static analyzers today.

Reading a file in real-time using Node.js

I need to work out the best way to read data that is being written to a file, using node.js, in real time. Trouble is, Node is a fast moving ship which makes finding the best method for addressing a problem difficult.
What I Want To Do
I have a java process that is doing something and then writing the results of this thing it does to a text file. It typically takes anything from 5 mins to 5 hours to run, with data being written the whole time, and can get up to some fairly hefty throughput rates (circa. 1000 lines/sec).
I would like to read this file, in real time, and then, using node aggregate the data and write it to a socket where it can be graphed on the client.
The client, graphs, sockets and aggregation logic are all done but I am confused about the best approach for reading the file.
What I Have Tried (or at least played with)
FIFO - I can tell my Java process to write to a FIFO and read this using node; this is in fact how we currently have it implemented using Perl, but because everything else is running in node it makes sense to port the code over.
Unix Sockets - As above.
fs.watchFile - will this work for what we need?
fs.createReadStream - is this better than watchFile?
fs & tail -f - seems like a hack.
What, actually, is my Question
I am tending towards using Unix Sockets, this seems the fastest option. But does node have better built-in features for reading files from the fs in real time?
If you want to keep the file as a persistent store of your data to prevent a loss of stream in case of a system crash or one of the members in your network of running processes dies, you can still continue on writing to a file and reading from it.
If you do not need this file as a persistent storage of produced results from your Java process, then going with a Unix socket is much better for both the ease and also the performance.
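For illustration, a minimal sketch of the Unix-socket approach in Node (the socket path is hypothetical; the Java process would connect to it and write its lines there):
var net = require('net');
// Each connection delivers data as soon as the producer process writes it.
var server = net.createServer(function (connection) {
    connection.setEncoding('utf8');
    connection.on('data', function (chunk) {
        // aggregate incoming lines here and forward them to the graphing socket
        console.log('received:', chunk);
    });
});
server.listen('/tmp/results.sock');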
fs.watchFile() is not what you need, because it works on file stats as the filesystem reports them, and since you want to read the file as it is being written, it is not suitable here.
SHORT UPDATE: I am very sorry to realize that although I accused fs.watchFile() of using file stats in the previous paragraph, I did the very same thing myself in my example code below! I had already warned readers to "take care!" because I wrote it in just a few minutes without testing it well; still, it can be done better by using fs.watch() instead of watchFile or fstatSync, if the underlying system supports it.
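For completeness, a minimal fs.watch() sketch (behaviour is platform-dependent; the callback may fire more than once per write and the filename argument is not guaranteed everywhere):
var fs = require('fs');
fs.watch('test-read-write.txt', function (eventType, filename) {
    if (eventType === 'change') {
        // read the newly appended bytes here, e.g. with fs.read() from the
        // last known offset as in the reader example below
    }
});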
For reading/writing from a file, I have just written below for fun in my break:
test-fs-writer.js: [You will not need this since you write file in your Java process]
var fs = require('fs'),
lineno=0;
var stream = fs.createWriteStream('test-read-write.txt', {flags:'a'});
stream.on('open', function() {
console.log('Stream opened, will start writing in 2 secs');
setInterval(function() { stream.write((++lineno)+' oi!\n'); }, 2000);
});
test-fs-reader.js: [Take care, this is just demonstration, check err objects!]
var fs = require('fs'),
bite_size = 256,
readbytes = 0,
file;
fs.open('test-read-write.txt', 'r', function(err, fd) { file = fd; readsome(); });
function readsome() {
var stats = fs.fstatSync(file); // yes sometimes async does not make sense!
if(stats.size<readbytes+1) {
console.log('Hehe I am much faster than your writer..! I will sleep for a while, I deserve it!');
setTimeout(readsome, 3000);
}
else {
fs.read(file, new Buffer(bite_size), 0, bite_size, readbytes, processsome);
}
}
function processsome(err, bytecount, buff) {
console.log('Read', bytecount, 'and will process it now.');
// Here we will process our incoming data:
// Do whatever you need. Just be careful about not using beyond the bytecount in buff.
console.log(buff.toString('utf-8', 0, bytecount));
// So we continue reading from where we left:
readbytes+=bytecount;
process.nextTick(readsome);
}
You can safely avoid using nextTick and call readsome() directly instead. Since we are still working sync here, it is not necessary in any sense. I just like it. :p
EDIT by Oliver Lloyd
Taking the example above but extending it to read CSV data gives:
var lastLineFeed,
lineArray;
function processsome(err, bytecount, buff) {
lastLineFeed = buff.toString('utf-8', 0, bytecount).lastIndexOf('\n');
if(lastLineFeed > -1){
// Split the buffer by line
lineArray = buff.toString('utf-8', 0, bytecount).slice(0,lastLineFeed).split('\n');
// Then split each line by comma
for(i=0;i<lineArray.length;i++){
// Add read rows to an array for use elsewhere
valueArray.push(lineArray[i].split(','));
}
// Set a new position to read from
readbytes+=lastLineFeed+1;
} else {
// No complete lines were read
readbytes+=bytecount;
}
process.nextTick(readFile);
}
Why do you think tail -f is a hack?
While figuring out I found a good example I would do something similar.
Real time online activity monitor example with node.js and WebSocket:
http://blog.new-bamboo.co.uk/2009/12/7/real-time-online-activity-monitor-example-with-node-js-and-websocket
Just to make this answer complete, I wrote you an example code which would run under 0.8.0 - (the http server is a hack maybe).
A child process is spawned running tail, and since a child process is an EventEmitter with three streams (we use stdout in our case), you can just add a listener with on.
filename: tailServer.js
usage: node tailServer /var/log/filename.log
var http = require("http");
var filename = process.argv[2];
if (!filename)
return console.log("Usage: node tailServer filename");
var spawn = require('child_process').spawn;
var tail = spawn('tail', ['-f', filename]);
http.createServer(function (request, response) {
console.log('request starting...');
response.writeHead(200, {'Content-Type': 'text/plain' });
tail.stdout.on('data', function (data) {
response.write('' + data);
});
}).listen(8088);
console.log('Server running at http://127.0.0.1:8088/');
This module is an implementation of the principle @hasanyasin suggests:
https://github.com/felixge/node-growing-file
I took the answer from #hasanyasin and wrapped it up into a modular promise. The basic idea is that you pass a file and a handler function that does something with the stringified-buffer that is read from the file. If the handler function returns true, then the file will stop being read. You can also set a timeout that will kill reading if the handler doesn't return true fast enough.
The promiser will return true if the resolve() was called due to timeout, otherwise it will return false.
See the bottom for usage example.
// https://stackoverflow.com/a/11233045
var fs = require('fs');
var Promise = require('promise');
class liveReaderPromiseMe {
constructor(file, buffStringHandler, opts) {
/*
var opts = {
starting_position: 0,
byte_size: 256,
check_for_bytes_every_ms: 3000,
no_handler_resolution_timeout_ms: null
};
*/
if (file == null) {
throw new Error("file arg must be present");
} else {
this.file = file;
}
if (buffStringHandler == null) {
throw new Error("buffStringHandler arg must be present");
} else {
this.buffStringHandler = buffStringHandler;
}
if (opts == null) {
opts = {};
}
if (opts.starting_position == null) {
this.current_position = 0;
} else {
this.current_position = opts.starting_position;
}
if (opts.byte_size == null) {
this.byte_size = 256;
} else {
this.byte_size = opts.byte_size;
}
if (opts.check_for_bytes_every_ms == null) {
this.check_for_bytes_every_ms = 3000;
} else {
this.check_for_bytes_every_ms = opts.check_for_bytes_every_ms;
}
if (opts.no_handler_resolution_timeout_ms == null) {
this.no_handler_resolution_timeout_ms = null;
} else {
this.no_handler_resolution_timeout_ms = opts.no_handler_resolution_timeout_ms;
}
}
startHandlerTimeout() {
if (this.no_handler_resolution_timeout_ms && (this._handlerTimer == null)) {
var that = this;
this._handlerTimer = setTimeout(
function() {
that._is_handler_timed_out = true;
},
this.no_handler_resolution_timeout_ms
);
}
}
clearHandlerTimeout() {
if (this._handlerTimer != null) {
clearTimeout(this._handlerTimer);
this._handlerTimer = null;
}
this._is_handler_timed_out = false;
}
isHandlerTimedOut() {
return !!this._is_handler_timed_out;
}
fsReadCallback(err, bytecount, buff) {
try {
if (err) {
throw err;
} else {
this.current_position += bytecount;
var buff_str = buff.toString('utf-8', 0, bytecount);
var that = this;
Promise.resolve().then(function() {
return that.buffStringHandler(buff_str);
}).then(function(is_handler_resolved) {
if (is_handler_resolved) {
that.resolve(false);
} else {
process.nextTick(that.doReading.bind(that));
}
}).catch(function(err) {
that.reject(err);
});
}
} catch(err) {
this.reject(err);
}
}
fsRead(bytecount) {
fs.read(
this.file,
new Buffer(bytecount),
0,
bytecount,
this.current_position,
this.fsReadCallback.bind(this)
);
}
doReading() {
if (this.isHandlerTimedOut()) {
return this.resolve(true);
}
var max_next_bytes = fs.fstatSync(this.file).size - this.current_position;
if (max_next_bytes) {
this.fsRead( (this.byte_size > max_next_bytes) ? max_next_bytes : this.byte_size );
} else {
setTimeout(this.doReading.bind(this), this.check_for_bytes_every_ms);
}
}
promiser() {
var that = this;
return new Promise(function(resolve, reject) {
that.resolve = resolve;
that.reject = reject;
that.doReading();
that.startHandlerTimeout();
}).then(function(was_resolved_by_timeout) {
that.clearHandlerTimeout();
return was_resolved_by_timeout;
});
}
}
module.exports = function(file, buffStringHandler, opts) {
try {
var live_reader = new liveReaderPromiseMe(file, buffStringHandler, opts);
return live_reader.promiser();
} catch(err) {
return Promise.reject(err);
}
};
Then use the above code like this:
var fs = require('fs');
var path = require('path');
var Promise = require('promise');
var liveReadAppendingFilePromiser = require('./path/to/liveReadAppendingFilePromiser');
var ending_str = '_THIS_IS_THE_END_';
var test_path = path.join('E:/tmp/test.txt');
var s_list = [];
var buffStringHandler = function(s) {
s_list.push(s);
var tmp = s_list.join('');
if (-1 !== tmp.indexOf(ending_str)) {
// if this return never occurs, then the file will be read until no_handler_resolution_timeout_ms
// by default, no_handler_resolution_timeout_ms is null, so read will continue forever until this function returns something that evaluates to true
return true;
// you can also return a promise:
// return Promise.resolve().then(function() { return true; } );
}
};
var appender = fs.openSync(test_path, 'a');
try {
var reader = fs.openSync(test_path, 'r');
try {
var options = {
starting_position: 0,
byte_size: 256,
check_for_bytes_every_ms: 3000,
no_handler_resolution_timeout_ms: 10000,
};
liveReadAppendingFilePromiser(reader, buffStringHandler, options)
.then(function(did_reader_time_out) {
console.log('reader timed out: ', did_reader_time_out);
console.log(s_list.join(''));
}).catch(function(err) {
console.error('bad stuff: ', err);
}).then(function() {
fs.closeSync(appender);
fs.closeSync(reader);
});
fs.write(appender, '\ncheck it out, I am a string');
fs.write(appender, '\nwho killed kenny');
//fs.write(appender, ending_str);
} catch(err) {
fs.closeSync(reader);
console.log('err1');
throw err;
}
} catch(err) {
fs.closeSync(appender);
console.log('err2');
throw err;
}
