GJS read file synchronously - javascript

I'm trying to use GJS, and more precisely to read a text file in a synchronous way.
Here is an example of the asynchronous function for file reading: gio-cat.js
gio-cat.js
I found how to proceed with Seed using the following function:
function readFile(filename) {
    print(filename);
    var input_file = gio.file_new_for_path(filename);
    var fstream = input_file.read();
    var dstream = new gio.DataInputStream.c_new(fstream);
    var data = dstream.read_until("", 0);
    fstream.close();
    return data;
}
Unfortunately, it doesn't work with GJS.
Can anyone help me?

GLib has the helper function GLib.file_get_contents(String fileName) to read files synchronously:
const GLib = imports.gi.GLib;
//...
let fileContents = String(GLib.file_get_contents("/path/to/yourFile")[1]);
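Note that file_get_contents actually returns a tuple of [ok, contents], and in GJS a failure is raised as an exception rather than returned, so a try/catch is the natural guard. A minimal sketch (contents is a byte array, converted with String() here as above):
const GLib = imports.gi.GLib;

try {
    // file_get_contents returns [ok, contents]; if the call fails,
    // GJS throws instead of returning ok = false
    let [ok, contents] = GLib.file_get_contents("/path/to/yourFile");
    print(String(contents));
} catch (e) {
    print("Could not read file: " + e.message);
}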

Here is a solution that works with just Gio.
function readFile(filename) {
    let input_file = Gio.file_new_for_path(filename);
    let size = input_file.query_info(
        "standard::size",
        Gio.FileQueryInfoFlags.NONE,
        null).get_size();
    let stream = input_file.open_readwrite(null).get_input_stream();
    let data = stream.read_bytes(size, null).get_data();
    stream.close(null);
    return data;
}
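Note that read_bytes(...).get_data() returns raw bytes rather than a string, and open_readwrite requires write permission on the file. If you only need a string, a hedged variant using Gio.File.load_contents (which reads the whole file in one call and returns [ok, contents, etag]) might look like this; ByteArray is the GJS built-in byteArray module:
const Gio = imports.gi.Gio;
const ByteArray = imports.byteArray;

function readFileAsString(filename) {
    let file = Gio.file_new_for_path(filename);
    // load_contents returns [ok, contents, etag] and throws on failure
    let [ok, contents] = file.load_contents(null);
    return ByteArray.toString(contents);
}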

As I use GJS for developing Cinnamon applets, I used to use the get_file_contents_utf8_sync function to read text files:
const Cinnamon = imports.gi.Cinnamon;
let fileContent = Cinnamon.get_file_contents_utf8_sync("file path");
If you have Cinnamon installed and are willing to depend on it, this answers your question.
Otherwise, here is the C code of the get_file_contents_utf8_sync function in cinnamon-util.c, in the hope that it helps:
char * cinnamon_get_file_contents_utf8_sync (const char *path, GError **error)
{
    char *contents;
    gsize len;

    if (!g_file_get_contents (path, &contents, &len, error))
        return NULL;

    if (!g_utf8_validate (contents, len, NULL))
    {
        g_free (contents);
        g_set_error (error,
                     G_IO_ERROR,
                     G_IO_ERROR_FAILED,
                     "File %s contains invalid UTF-8",
                     path);
        return NULL;
    }

    return contents;
}
Cinnamon source code

Try replacing
new gio.DataInputStream.c_new(fstream);
with
gio.DataInputStream.new(fstream);
It worked for me.
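For reference, a fuller line-by-line variant in GJS might look like the sketch below (untested; read_line returns [line, length] and a null line at end of stream):
const Gio = imports.gi.Gio;

function readLines(filename) {
    let file = Gio.file_new_for_path(filename);
    // wrap the file's input stream in a DataInputStream for line-based reads
    let dstream = new Gio.DataInputStream({
        base_stream: file.read(null)
    });
    let lines = [];
    while (true) {
        let [line, length] = dstream.read_line(null);
        if (line === null)
            break;
        lines.push(String(line));
    }
    dstream.close(null);
    return lines;
}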

Related

code after var fs = require('fs') not running

Why does my simple jQuery not run after the var fs = require('fs') code?
This runs correctly:
//here is my simple jquery
$("#table th").css("color", "yellow");
//here starts the fs code
var fs = require('fs');
var output = fs.readFileSync('component names.txt', 'utf8').replace(/(\r)/gm, "").split('\n').map((line) => {
    let [Eng, Spa, ger] = line.split('\t');
    return {
        Eng,
        Spa,
        ger
    };
});
var Eng = output.map(item => item.Eng);
var Spa = output.map(item => item.Spa);
However, if I put the $("#table th").css("color", "yellow") at the bottom, where I want it because I'm going to populate a table, it doesn't work:
var fs = require('fs');
var output = fs.readFileSync('component names.txt', 'utf8').replace(/(\r)/gm, "").split('\n').map((line) => {
    let [Eng, Spa, ger] = line.split('\t');
    return {
        Eng,
        Spa,
        ger
    };
});
var Eng = output.map(item => item.Eng);
var Spa = output.map(item => item.Spa);
//jquery
$("#table th").css("color", "yellow");
Why?
I would suspect it has something to do with the readFileSync function.
Try wrapping it in a try/catch and see if it catches anything.
You said that the first snippet works, but I think you were referring to the jQuery code; I believe the fs code is not functioning properly.
try {
    var output = fs.readFileSync('component names.txt', 'utf8').replace(/(\r)/gm, "").split('\n').map((line) => {
        let [Eng, Spa, ger] = line.split('\t');
        return {
            Eng,
            Spa,
            ger
        };
    });
    var Eng = output.map(item => item.Eng);
    var Spa = output.map(item => item.Spa);
} catch (e) {
    console.error(e) //see what's going on here
}
If you do this, the jQuery code will work, because whatever error is happening will be caught and logged instead of halting the script. It's still very important to debug the code so you don't run into future problems.

Best practice to handle undefined variables dynamically in JavaScript/Nodejs

OK, maybe it's not the best title, but I lacked inspiration, so here goes:
Let's say you have a "global" (not really) variable to store temporary data and sub-data as random users interact with your server. Normally, on the first interaction with your server, the main variable will be undefined, so you need to handle that case.
Now, what puzzled me about this is what's the best practice, performance-wise, to do this if there are a lot of users and way more interactions with the variable.
Puzzled? Yeah, I know, words are not my strong point, so let me show you in code.
So you have
var user_data = [];
Then a function that handles user interaction to store data:
function writeData(uid, data_name, data)
Now, on first interaction, user_data[uid][data_name] is undefined, and so is user_data[uid].
I know you can handle this in two ways:
With an if:
if (!user_data[uid]) user_data[uid] = {}
user_data[uid][data_name] = data
With try/catch:
try { user_data[uid][data_name] = data }
catch (e) { user_data[uid] = {}; writeData(uid, data_name, data) }
The if will check on every interaction, and like I said, there are a lot.
The try/catch will trigger once, but it has a cost as a block (afaik).
Which one is better? Or is there another, better way?
@Nertan,
There is a partiality in your proof :P. I have slightly tweaked the short-circuit way (same order of execution as the if way). With this you can conclude.
//var present = require('present');
function test(val, ud, fun) {
    var k = 10000000;
    var t = Date.now();
    for (var i = 0; i < k; i++) {
        var uid = Math.ceil(Math.random() * 1000);
        fun(uid, ud, "value");
    }
    var tf = Date.now() - t;
    return tf;
}

function setValue_Opp(uid, ud, value) {
    (!ud[uid] && (ud[uid] = {})) && (ud[uid].value = value);
}

function setValue_Try(uid, ud, value) {
    try { ud[uid].value = value }
    catch (e) { ud[uid] = {}; setValue_Try(uid, ud, value) };
}

function setValue_Cond(uid, ud, value) {
    if (!ud[uid]) ud[uid] = {}
    ud[uid].value = value;
}

var k1 = 0;
var k2 = 0;
var k3 = 0;
for (var i = 0; i < 10; i++) {
    k1 += test(1, {}, setValue_Cond);
    k2 += test(2, {}, setValue_Try);
    k3 += test(3, {}, setValue_Opp);
}
console.log(k1, k2, k3)
I feel we can take advantage of short-circuit logical operators, as below:
let user_data = {}

const writeData = (uid, data_name, data) => {
    ((user_data[uid] || (user_data[uid] = {})) && (user_data[uid][data_name] = data))
    console.log(user_data)
    // perform write action
}

writeData('1', "test", "test1");
writeData('2', "test", "test2");
writeData('1', "test", "test3");
OK, so I had to rewrite the test because it doesn't work well in the Snippet.
So I made this for node.js:
var present = require('present');

function test(val, ud, fun) {
    var k = 10000000;
    var t = present();
    for (var i = 0; i < k; i++) {
        var uid = Math.ceil(Math.random() * 1000);
        fun(uid, ud, "value");
    }
    var tf = present() - t;
    console.log("END " + val + " at " + tf);
    return tf;
}

function setValue_Opp(uid, ud, value) {
    (ud[uid] || (ud[uid] = {})) && (ud[uid].value = value);
}

function setValue_Try(uid, ud, value) {
    try { ud[uid].value = value }
    catch (e) { ud[uid] = {}; setValue_Try(uid, ud, value) };
}

function setValue_Cond(uid, ud, value) {
    if (!ud[uid]) ud[uid] = {}
    ud[uid].value = value;
}

var k1 = 0;
var k2 = 0;
var k3 = 0;
for (var i = 0; i < 10; i++) {
    k1 += test(1, {}, setValue_Cond);
    k2 += test(2, {}, setValue_Try);
    k3 += test(3, {}, setValue_Opp);
}
console.log(k1, k2, k3)
And in the end:
3244.328997004777 3695.0267750024796 3437.6855720058084
Which means:
The best is the classical if.
The second best is the conditional-operators method.
And the worst is the try/catch.
So it seems the classics win.
Edited:
With further tests, thanks to @CRayen, the best method is:
(!ud[uid] && (ud[uid] = {})) && (ud[uid].value = value);
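As a side note, on engines with ES2021 support the same guard can be written as a single expression with logical nullish assignment; a minimal sketch (not part of the benchmark above):
// create the bucket only when it is missing, then write the value
(ud[uid] ??= {}).value = value;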

Access to filesystem from WinDBG's JavaScript script

I'm currently playing with WinDbg scripts written in JavaScript, as described by Microsoft.
How can I access the filesystem from within the JavaScript code? I'm interested in both reading from and writing to files located somewhere on disk. For JavaScript executing in browsers these features are disabled for security reasons, but NodeJS, for example, has its own libraries to support filesystem operations.
This works:
"use strict";
function invokeScript() {
var debugControl = host.namespace.Debugger.Utility.Control;
var output = debugControl.ExecuteCommand("vertarget");
writeOutputToFile(output);
}
function writeOutputToFile(output) {
var logFilePath = "c:\\debugging\\output\\output.log";
var logFile;
if (host.namespace.Debugger.Utility.FileSystem.FileExists(logFilePath)) {
logFile = host.namespace.Debugger.Utility.FileSystem.CreateFile(logFilePath, "OpenExisting");
}
else {
logFile = host.namespace.Debugger.Utility.FileSystem.CreateFile(logFilePath);
}
var textWriter = host.namespace.Debugger.Utility.FileSystem.CreateTextWriter(logFile, "Utf16");
try {
for (var line of output) {
textWriter.WriteLine(line);
}
}
finally {
logFile.Close();
}
}
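For the reading half of the question, the same FileSystem namespace should offer a matching reader; the sketch below is unverified and assumes OpenFile, CreateTextReader, and ReadLineContents exist by symmetry with the writer API used above, so check dx Debugger.Utility.FileSystem in your WinDbg version before relying on it:
function readFileLines(logFilePath) {
    var fileSystem = host.namespace.Debugger.Utility.FileSystem;
    // OpenFile/CreateTextReader/ReadLineContents are assumptions here,
    // mirroring CreateFile/CreateTextWriter/WriteLine from the writer code
    var logFile = fileSystem.OpenFile(logFilePath);
    var textReader = fileSystem.CreateTextReader(logFile, "Utf16");
    var lines = [];
    try {
        for (var line of textReader.ReadLineContents()) {
            lines.push(line);
        }
    }
    finally {
        logFile.Close();
    }
    return lines;
}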
I tried File, Blob and ActiveXObject as suggested throughout the Internet, but none of them works in WinDbg.
You could try a combination of .dvalloc + .writemem + .dvfree. Below is a starting point, but far from complete:
function saveTextAsFile()
{
    var dbgOut = host.diagnostics.debugLog;
    var exec = host.namespace.Debugger.Utility.Control.ExecuteCommand;
    var output = exec(".dvalloc 0x10000");
    for (var line of output)
    {
        dbgOut("Output: " + line + "\n");
        var index = line.indexOf("starting at ");
        var address = line.substring(index + ("starting at ".length));
        dbgOut("Allocated memory at " + address + "\n");
        exec(".writemem f:\\debug\\logs\\fromscript.txt " + address + " L10000")
        var output = exec(".dvfree " + address + " 0x10000");
        break;
    }
}

Convert text from a text file to an array with fs [node js]

I have a txt file that contains:
{"date":"2013/06/26","statement":"insert","nombre":1}
{"date":"2013/06/26","statement":"insert","nombre":1}
{"date":"2013/06/26","statement":"select","nombre":4}
How can I convert the contents of the text file into an array such as:
statement = [
    {"date":"2013/06/26","statement":"insert","nombre":1},
    {"date":"2013/06/26","statement":"insert","nombre":1},
    {"date":"2013/06/26","statement":"select","nombre":4},
];
I use the fs module of node js. Thanks.
Sorry, I will explain in more detail:
I have an array:
st = [
    {"date":"2013/06/26","statement":"insert","nombre":1},
    {"date":"2013/06/26","statement":"insert","nombre":5},
    {"date":"2013/06/26","statement":"select","nombre":4},
];
If I use this code:
var arr = new LINQ(st)
    .OrderBy(function(x) { return x.nombre; })
    .Select(function(x) { return x.statement; })
    .ToArray();
I get the result I want:
insert select insert
But the problem is that my data is in a text file.
Any suggestion? Thanks again.
There is no reason not to write the file parser yourself. This will work on a file of any size:
var fs = require('fs');
var fileStream = fs.createReadStream('file.txt');
var data = "";

fileStream.on('readable', function() {
    //this function reads chunks of data and emits a newLine event when \n is found
    data += fileStream.read();
    while (data.indexOf('\n') >= 0) {
        fileStream.emit('newLine', data.substring(0, data.indexOf('\n')));
        data = data.substring(data.indexOf('\n') + 1);
    }
});

fileStream.on('end', function() {
    //this function sends the last chunk of data to the newLine event and tells it
    //that the file has ended
    fileStream.emit('newLine', data, true);
});

var statement = [];

fileStream.on('newLine', function(line_of_text, end_of_file) {
    //this is the code where you handle each line
    // line_of_text = string which contains one line
    // end_of_file = true if the end of file has been reached
    statement.push(JSON.parse(line_of_text));
    if (end_of_file) {
        console.dir(statement);
        //here you have your statement object ready
    }
});
If it's a small file, you might get away with something like this:
// specifying the encoding means you don't have to do `.toString()`
var arrayOfThings = fs.readFileSync("./file", "utf8").trim().split(/[\r\n]+/g).map(function(line) {
    // this try/catch will make it so we just return null
    // for any lines that don't parse successfully, instead
    // of throwing an error.
    try {
        return JSON.parse(line);
    } catch (e) {
        return null;
    }
// this .filter() removes anything that didn't parse correctly
}).filter(function(object) {
    return !!object;
});
If it's larger, you might want to consider reading it line by line, using any one of the many modules on npm for consuming lines from a stream.
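Node's built-in readline module can also consume a stream line by line without a third-party dependency; a minimal sketch (blank lines are skipped so JSON.parse doesn't throw on them):
var fs = require("fs"),
    readline = require("readline");

var statement = [];
var rl = readline.createInterface({
    input: fs.createReadStream("./file")
});
rl.on("line", function(line) {
    // each 'line' event delivers one line without the trailing newline
    if (line.trim()) statement.push(JSON.parse(line));
});
rl.on("close", function() {
    // the whole file has been consumed at this point
    console.dir(statement);
});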
Wanna see how to do it with streams? Let's see how we do it with streams. This isn't a practical example, but it's fun anyway!
var stream = require("stream"),
    fs = require("fs");

var LineReader = function LineReader(options) {
    options = options || {};
    options.objectMode = true;
    stream.Transform.call(this, options);
    this._buffer = "";
};
LineReader.prototype = Object.create(stream.Transform.prototype, {constructor: {value: LineReader}});

LineReader.prototype._transform = function _transform(input, encoding, done) {
    if (Buffer.isBuffer(input)) {
        input = input.toString("utf8");
    }
    this._buffer += input;
    var lines = this._buffer.split(/[\r\n]+/);
    this._buffer = lines.pop();
    for (var i = 0; i < lines.length; ++i) {
        this.push(lines[i]);
    }
    return done();
};

LineReader.prototype._flush = function _flush(done) {
    if (this._buffer.length) {
        this.push(this._buffer);
    }
    return done();
};

var JSONParser = function JSONParser(options) {
    options = options || {};
    options.objectMode = true;
    stream.Transform.call(this, options);
};
JSONParser.prototype = Object.create(stream.Transform.prototype, {constructor: {value: JSONParser}});

JSONParser.prototype._transform = function _transform(input, encoding, done) {
    try {
        input = JSON.parse(input);
    } catch (e) {
        return done(e);
    }
    this.push(input);
    return done();
};

var Collector = function Collector(options) {
    options = options || {};
    options.objectMode = true;
    stream.Transform.call(this, options);
    this._entries = [];
};
Collector.prototype = Object.create(stream.Transform.prototype, {constructor: {value: Collector}});

Collector.prototype._transform = function _transform(input, encoding, done) {
    this._entries.push(input);
    return done();
};

Collector.prototype._flush = function _flush(done) {
    this.push(this._entries);
    return done();
};

fs.createReadStream("./file").pipe(new LineReader()).pipe(new JSONParser()).pipe(new Collector()).on("readable", function() {
    var results = this.read();
    console.log(results);
});
fs.readFileSync("myfile.txt").toString().split(/[\r\n]/)
This gets you each line as a string.
You can then use UnderscoreJS or your own for loop to apply the JSON.parse("your json string") method to each element of the array.
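For example, a plain map over the split lines (a hedged sketch; empty lines are filtered out first so JSON.parse doesn't choke on them):
var statement = fs.readFileSync("myfile.txt").toString()
    .split(/[\r\n]/)
    .filter(function(line) { return line.trim().length > 0; })
    .map(function(line) { return JSON.parse(line); });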
var arr = fs.readFileSync('mytxtfile', 'utf-8').split('\n')
I think this is the simplest way of creating an array from your text file.

Reading a file in real-time using Node.js

I need to work out the best way to read data that is being written to a file, using node.js, in real time. Trouble is, Node is a fast-moving ship, which makes finding the best method for addressing a problem difficult.
What I Want To Do
I have a Java process that is doing something and then writing the results of this thing it does to a text file. It typically takes anything from 5 minutes to 5 hours to run, with data being written the whole time, and can get up to some fairly hefty throughput rates (circa 1000 lines/sec).
I would like to read this file, in real time, and then, using node aggregate the data and write it to a socket where it can be graphed on the client.
The client, graphs, sockets and aggregation logic are all done but I am confused about the best approach for reading the file.
What I Have Tried (or at least played with)
FIFO - I can tell my Java process to write to a fifo and read this using node; this is in fact how we currently have this implemented using Perl, but because everything else is running in node it makes sense to port the code over.
Unix Sockets - As above.
fs.watchFile - will this work for what we need?
fs.createReadStream - is this better than watchFile?
fs & tail -f - seems like a hack.
What, actually, is my Question
I am tending towards using Unix sockets; this seems the fastest option. But does node have better built-in features for reading files from the fs in real time?
If you want to keep the file as a persistent store of your data, to prevent loss in case of a system crash or if one of the members in your network of running processes dies, you can still continue writing to the file and reading from it.
If you do not need this file as persistent storage of the results produced by your Java process, then going with a Unix socket is much better for both ease and performance.
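For reference, the node side of a Unix domain socket consumer is only a few lines with the built-in net module; a minimal sketch, assuming the Java process connects and writes newline-delimited records to a hypothetical path /tmp/results.sock:
var net = require("net");

var server = net.createServer(function(connection) {
    var buffered = "";
    connection.on("data", function(chunk) {
        buffered += chunk.toString("utf8");
        var lines = buffered.split("\n");
        buffered = lines.pop(); // keep the trailing partial line for later
        lines.forEach(function(line) {
            // hand each complete line to your aggregation logic here
            console.log(line);
        });
    });
});
server.listen("/tmp/results.sock");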
fs.watchFile() is not what you need, because it works on file stats as the filesystem reports them; since you want to read the file as it is being written, this is not what you want.
SHORT UPDATE: I am very sorry to realize that although I accused fs.watchFile() of using file stats in the previous paragraph, I did the very same thing myself in my example code below! Although I already warned readers to "take care!" because I wrote it in just a few minutes without testing well, it can still be done better by using fs.watch() instead of watchFile or fstatSync, if the underlying system supports it.
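A hedged sketch of that improvement: let fs.watch() signal when the file has changed and read only the newly appended bytes (event semantics differ across platforms, so treat this as a starting point rather than a drop-in):
var fs = require('fs');

var position = 0;
fs.watch('test-read-write.txt', function(eventType) {
    if (eventType !== 'change') return;
    fs.stat('test-read-write.txt', function(err, stats) {
        if (err || stats.size <= position) return;
        // stream only the bytes appended since the last read
        var tailStream = fs.createReadStream('test-read-write.txt', {
            start: position,
            end: stats.size - 1
        });
        tailStream.on('data', function(chunk) {
            position += chunk.length;
            console.log(chunk.toString('utf-8'));
        });
    });
});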
For reading from and writing to a file, I have just written the below for fun during my break:
test-fs-writer.js: [You will not need this, since you write the file in your Java process]
var fs = require('fs'),
    lineno = 0;
var stream = fs.createWriteStream('test-read-write.txt', {flags: 'a'});

stream.on('open', function() {
    console.log('Stream opened, will start writing in 2 secs');
    setInterval(function() { stream.write((++lineno) + ' oi!\n'); }, 2000);
});
test-fs-reader.js: [Take care, this is just demonstration, check err objects!]
var fs = require('fs'),
    bite_size = 256,
    readbytes = 0,
    file;

fs.open('test-read-write.txt', 'r', function(err, fd) { file = fd; readsome(); });

function readsome() {
    var stats = fs.fstatSync(file); // yes sometimes async does not make sense!
    if (stats.size < readbytes + 1) {
        console.log('Hehe I am much faster than your writer..! I will sleep for a while, I deserve it!');
        setTimeout(readsome, 3000);
    }
    else {
        fs.read(file, new Buffer(bite_size), 0, bite_size, readbytes, processsome);
    }
}

function processsome(err, bytecount, buff) {
    console.log('Read', bytecount, 'and will process it now.');
    // Here we will process our incoming data:
    // Do whatever you need. Just be careful about not using beyond the bytecount in buff.
    console.log(buff.toString('utf-8', 0, bytecount));
    // So we continue reading from where we left:
    readbytes += bytecount;
    process.nextTick(readsome);
}
You can safely avoid using nextTick and call readsome() directly instead. Since we are still working sync here, it is not necessary in any sense. I just like it. :p
EDIT by Oliver Lloyd
Taking the example above but extending it to read CSV data gives:
var lastLineFeed,
    lineArray,
    valueArray = []; // collects the parsed rows; was undeclared in the original

function processsome(err, bytecount, buff) {
    lastLineFeed = buff.toString('utf-8', 0, bytecount).lastIndexOf('\n');
    if (lastLineFeed > -1) {
        // Split the buffer by line
        lineArray = buff.toString('utf-8', 0, bytecount).slice(0, lastLineFeed).split('\n');
        // Then split each line by comma
        for (var i = 0; i < lineArray.length; i++) {
            // Add read rows to an array for use elsewhere
            valueArray.push(lineArray[i].split(','));
        }
        // Set a new position to read from
        readbytes += lastLineFeed + 1;
    } else {
        // No complete lines were read
        readbytes += bytecount;
    }
    process.nextTick(readsome); // continue the reader loop from the example above
}
Why do you think tail -f is a hack?
While figuring this out I found a good example; I would do something similar.
Real-time online activity monitor example with node.js and WebSocket:
http://blog.new-bamboo.co.uk/2009/12/7/real-time-online-activity-monitor-example-with-node-js-and-websocket
Just to make this answer complete, I wrote you an example which runs under 0.8.0 (the http server is a hack, maybe).
A child process is spawned running tail, and since a child process is an EventEmitter with three streams (we use stdout in our case), you can just add a listener with on.
filename: tailServer.js
usage: node tailServer /var/log/filename.log
var http = require("http");
var filename = process.argv[2];

if (!filename)
    return console.log("Usage: node tailServer filename");

var spawn = require('child_process').spawn;
var tail = spawn('tail', ['-f', filename]);

http.createServer(function (request, response) {
    console.log('request starting...');
    response.writeHead(200, {'Content-Type': 'text/plain'});
    tail.stdout.on('data', function (data) {
        response.write('' + data);
    });
}).listen(8088);

console.log('Server running at http://127.0.0.1:8088/');
This module is an implementation of the principle @hasanyasin suggests:
https://github.com/felixge/node-growing-file
I took the answer from @hasanyasin and wrapped it up in a modular promise. The basic idea is that you pass a file and a handler function that does something with the stringified buffer that is read from the file. If the handler function returns true, then the file will stop being read. You can also set a timeout that will kill reading if the handler doesn't return true fast enough.
The promiser will return true if resolve() was called due to the timeout; otherwise it will return false.
See the bottom for a usage example.
// https://stackoverflow.com/a/11233045
var fs = require('fs');
var Promise = require('promise');
class liveReaderPromiseMe {
    constructor(file, buffStringHandler, opts) {
        /*
        var opts = {
            starting_position: 0,
            byte_size: 256,
            check_for_bytes_every_ms: 3000,
            no_handler_resolution_timeout_ms: null
        };
        */
        if (file == null) {
            throw new Error("file arg must be present");
        } else {
            this.file = file;
        }
        if (buffStringHandler == null) {
            throw new Error("buffStringHandler arg must be present");
        } else {
            this.buffStringHandler = buffStringHandler;
        }
        if (opts == null) {
            opts = {};
        }
        if (opts.starting_position == null) {
            this.current_position = 0;
        } else {
            this.current_position = opts.starting_position;
        }
        if (opts.byte_size == null) {
            this.byte_size = 256;
        } else {
            this.byte_size = opts.byte_size;
        }
        if (opts.check_for_bytes_every_ms == null) {
            this.check_for_bytes_every_ms = 3000;
        } else {
            this.check_for_bytes_every_ms = opts.check_for_bytes_every_ms;
        }
        if (opts.no_handler_resolution_timeout_ms == null) {
            this.no_handler_resolution_timeout_ms = null;
        } else {
            this.no_handler_resolution_timeout_ms = opts.no_handler_resolution_timeout_ms;
        }
    }

    startHandlerTimeout() {
        if (this.no_handler_resolution_timeout_ms && (this._handlerTimer == null)) {
            var that = this;
            this._handlerTimer = setTimeout(
                function() {
                    that._is_handler_timed_out = true;
                },
                this.no_handler_resolution_timeout_ms
            );
        }
    }

    clearHandlerTimeout() {
        if (this._handlerTimer != null) {
            clearTimeout(this._handlerTimer);
            this._handlerTimer = null;
        }
        this._is_handler_timed_out = false;
    }

    isHandlerTimedOut() {
        return !!this._is_handler_timed_out;
    }

    fsReadCallback(err, bytecount, buff) {
        try {
            if (err) {
                throw err;
            } else {
                this.current_position += bytecount;
                var buff_str = buff.toString('utf-8', 0, bytecount);
                var that = this;
                Promise.resolve().then(function() {
                    return that.buffStringHandler(buff_str);
                }).then(function(is_handler_resolved) {
                    if (is_handler_resolved) {
                        that.resolve(false);
                    } else {
                        process.nextTick(that.doReading.bind(that));
                    }
                }).catch(function(err) {
                    that.reject(err);
                });
            }
        } catch (err) {
            this.reject(err);
        }
    }

    fsRead(bytecount) {
        fs.read(
            this.file,
            new Buffer(bytecount),
            0,
            bytecount,
            this.current_position,
            this.fsReadCallback.bind(this)
        );
    }

    doReading() {
        if (this.isHandlerTimedOut()) {
            return this.resolve(true);
        }
        var max_next_bytes = fs.fstatSync(this.file).size - this.current_position;
        if (max_next_bytes) {
            this.fsRead((this.byte_size > max_next_bytes) ? max_next_bytes : this.byte_size);
        } else {
            setTimeout(this.doReading.bind(this), this.check_for_bytes_every_ms);
        }
    }

    promiser() {
        var that = this;
        return new Promise(function(resolve, reject) {
            that.resolve = resolve;
            that.reject = reject;
            that.doReading();
            that.startHandlerTimeout();
        }).then(function(was_resolved_by_timeout) {
            that.clearHandlerTimeout();
            return was_resolved_by_timeout;
        });
    }
}

module.exports = function(file, buffStringHandler, opts) {
    try {
        var live_reader = new liveReaderPromiseMe(file, buffStringHandler, opts);
        return live_reader.promiser();
    } catch (err) {
        return Promise.reject(err);
    }
};
Then use the above code like this:
var fs = require('fs');
var path = require('path');
var Promise = require('promise');
var liveReadAppendingFilePromiser = require('./path/to/liveReadAppendingFilePromiser');

var ending_str = '_THIS_IS_THE_END_';
var test_path = path.join('E:/tmp/test.txt');

var s_list = [];
var buffStringHandler = function(s) {
    s_list.push(s);
    var tmp = s_list.join('');
    if (-1 !== tmp.indexOf(ending_str)) {
        // if this return never occurs, then the file will be read until no_handler_resolution_timeout_ms
        // by default, no_handler_resolution_timeout_ms is null, so read will continue forever until this function returns something that evaluates to true
        return true;
        // you can also return a promise:
        // return Promise.resolve().then(function() { return true; } );
    }
};

var appender = fs.openSync(test_path, 'a');
try {
    var reader = fs.openSync(test_path, 'r');
    try {
        var options = {
            starting_position: 0,
            byte_size: 256,
            check_for_bytes_every_ms: 3000,
            no_handler_resolution_timeout_ms: 10000,
        };
        liveReadAppendingFilePromiser(reader, buffStringHandler, options)
            .then(function(did_reader_time_out) {
                console.log('reader timed out: ', did_reader_time_out);
                console.log(s_list.join(''));
            }).catch(function(err) {
                console.error('bad stuff: ', err);
            }).then(function() {
                fs.closeSync(appender);
                fs.closeSync(reader);
            });
        fs.write(appender, '\ncheck it out, I am a string');
        fs.write(appender, '\nwho killed kenny');
        //fs.write(appender, ending_str);
    } catch (err) {
        fs.closeSync(reader);
        console.log('err1');
        throw err;
    }
} catch (err) {
    fs.closeSync(appender);
    console.log('err2');
    throw err;
}
