I have the following code that reads a CSV file and then pulls a document from the database:
fs.readFile process.env.PWD + '/data/errorports.csv', 'utf8', (err, data) ->
  if err
    console.log "Error reading csv", err
    return
  rows = data.split('\n')
  for row in rows
    columns = row.split(',')
    airportCode = columns[0]
    airport = Airports.findOne({_id: airportCode})
    console.log 'airport:', airport
But when I call Airports.findOne({_id: airportCode}) it throws the error:
/Users/abemiessler/.meteor/packages/meteor-tool/.1.3.4.19lp8gr++os.osx.x86_64+web.browser+web.cordova/mt-os.osx.x86_64/dev_bundle/server-lib/node_modules/fibers/future.js:159
throw new Error('Can\'t wait without a fiber');
^
Error: Can't wait without a fiber
Can anyone see why I would be getting this error? Any suggestions on how to get around it?
You probably need to use Meteor.wrapAsync() here, to make your async function run inside a Fiber, which will allow you to execute your Meteor code inside it. Here is an example:
var syncReadFile = Meteor.wrapAsync(fs.readFile);

syncReadFile(process.env.PWD + '/data/errorports.csv', 'utf8', function(err, data) {
  if (err) {
    console.log("Error reading csv", err);
    return;
  }
  var rows = data.split('\n');
  rows.forEach(function(row) {
    var columns = row.split(',');
    var airportCode = columns[0];
    var airport = Airports.findOne({_id: airportCode});
    console.log('airport:', airport);
  });
});
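Note that on the server, a function wrapped with Meteor.wrapAsync can also be called without a callback, in which case it runs synchronously inside a Fiber, returns the result directly, and throws on error. A minimal sketch of that style, under the same assumptions as above:

var syncReadFile = Meteor.wrapAsync(fs.readFile);

// No callback passed: this blocks the current Fiber until the file is read.
var data = syncReadFile(process.env.PWD + '/data/errorports.csv', 'utf8');
data.split('\n').forEach(function(row) {
  var airportCode = row.split(',')[0];
  console.log('airport:', Airports.findOne({_id: airportCode}));
});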
Or you can try wrapping your callback in Meteor.bindEnvironment(). That would look something like:
fs.readFile(process.env.PWD + '/data/errorports.csv', 'utf8',
  Meteor.bindEnvironment(function(err, data) {
    // ..your code..
  })
);
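Filled in with the code from the question, that might look like the following sketch (assuming this runs in server code where Airports is in scope):

fs.readFile(process.env.PWD + '/data/errorports.csv', 'utf8',
  Meteor.bindEnvironment(function(err, data) {
    if (err) {
      console.log("Error reading csv", err);
      return;
    }
    // The callback now runs inside a Fiber, so findOne is safe to call.
    data.split('\n').forEach(function(row) {
      var airportCode = row.split(',')[0];
      console.log('airport:', Airports.findOne({_id: airportCode}));
    });
  })
);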
I have text data like this, and I mark lines with a * character to track unfinished jobs. I want lines that start with the * character to be processed, and after processing, the * character should be removed from the string. Here's the relevant part of my code:
fs.readFile('data.txt', async function (err, data) {
  if (err) throw err;
  let array = data.toString().split("\n");
  for (i in array) {
    if (array[i].charAt(0) === '*') {
      console.log(`Now Processing : ${array[i]} | ${array.length - i - 1} items left`);
      //
      // SOME JOBS
      //
      let newValue = array[i].replace('*', '');
      fs.writeFile('data.txt', newValue, 'utf-8', function (err, data) {
        if (err) throw err;
        console.log('Done!');
      });
    } else {
      console.log(`${array[i]} Already Captured`);
    }
  }
});
From what I understand, you are trying to read through a file, find the lines starting with the * character, do some work on them, and then remove the * from those lines.
Firstly, the call to fs.writeFile is happening inside a loop, so every iteration of that loop calls the writeFile function. From the Node.js docs, this method will "... asynchronously write data to the file, replacing the file if it already exists." So you are replacing the file on every iteration. What you want is fs.appendFile, or better yet, to pass the append system flag to writeFile; the system flags are listed in the Node.js fs documentation. Take a look at the 'a' flag and pass it in the options object of writeFile.
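For instance, a minimal sketch of the flag approach (reusing the filename and newValue from the question):

// { flag: 'a' } makes writeFile append rather than replace the file.
fs.writeFile('data.txt', newValue + '\n', { encoding: 'utf-8', flag: 'a' }, function (err) {
  if (err) throw err;
});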
Your use of async on the readFile callback is also incomplete, since you never await anything inside that callback.
fs.readFile('data.txt', async function (err, data) {
  if (err) throw err;
  let array = data.toString().split("\n");
  for (const i in array) {
    if (array[i].charAt(0) === '*') {
      console.log(`Now Processing : ${array[i]} | ${array.length - i - 1} items left`);
      //
      // SOME JOBS
      //
      let newValue = array[i].replace('*', '');
      newValue = newValue + '\n';
      // Use the promise-based API: the callback form of fs.appendFile
      // returns undefined, so awaiting it would not actually wait.
      await fs.promises.appendFile('data.txt', newValue, 'utf-8');
      console.log('Done!');
    } else {
      console.log(`${array[i]} Already Captured`);
    }
  }
});
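On older Node versions without fs.promises, util.promisify gives the same awaitable behavior; a sketch (appendLine is just an illustrative helper name):

const util = require('util');
const fs = require('fs');
const appendFile = util.promisify(fs.appendFile);

async function appendLine(newValue) {
  // Resolves once the append has completed, so callers can await it.
  await appendFile('data.txt', newValue + '\n', 'utf-8');
}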
I am trying to write code that reads a file, counts the lines in it, and then adds another line with the line's number at the beginning, like an index, basically. The problem is that fs.appendFile() starts running before fs.readFile() is finished, and I am not sure why. Is there something I am doing wrong?
My code:
fs.readFile('list.txt', 'utf-8', (err, data) => {
  if (err) throw err;
  lines = data.split(/\r\n|\r|\n/).length - 1;
  console.log("Im supposed to run first");
});

console.log("Im supposed to run second");

fs.appendFile('list.txt', '[' + lines + ']' + item + '\n', function(err) {
  if (err) throw err;
  console.log('List updated!');
  fs.readFile('list.txt', 'utf-8', (err, data) => {
    if (err) throw err;
    // Converting the raw Buffer data to text using the toString function.
    message.channel.send('List was updated successfully! New list: \n' + data.toString());
    console.log(data);
  });
});
My output:
Im supposed to run second
List updated!
Im supposed to run first
[0]first item
Currently, you are using readFile and appendFile. Both of these functions are asynchronous: they are started one after the other and complete whenever they finish, in no guaranteed order.
If you'd like to run these synchronously, you can use the fs.readFileSync and fs.appendFileSync methods to synchronously read and append to the files.
Therefore, with something like the following:
const readFileData = fs.readFileSync("list.txt");
fs.appendFileSync('list.txt', '[' + lines + ']' + item + '\n');
The first line of code will run, then the second line of code.
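Putting the whole flow together synchronously, a sketch (item and message come from the surrounding bot code in the question):

const data = fs.readFileSync('list.txt', 'utf-8');
const lines = data.split(/\r\n|\r|\n/).length - 1;

// The append only runs after the read has returned, so lines is always set.
fs.appendFileSync('list.txt', '[' + lines + ']' + item + '\n');
console.log('List updated!');

const updated = fs.readFileSync('list.txt', 'utf-8');
message.channel.send('List was updated successfully! New list: \n' + updated);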
The functions you are using are asynchronous, so the response of the second function can be received before the response of the first one.
fs.readFile('list.txt', 'utf-8', (err, data) => {
  if (err) throw err;
  lines = data.split(/\r\n|\r|\n/).length - 1;
  console.log("Im supposed to run first");
  appendFile(lines);
});

let appendFile = (lines) => {
  fs.appendFile('list.txt', '[' + lines + ']' + item + '\n', function(err) {
    console.log("Im supposed to run second");
    if (err) throw err;
    console.log('List updated!');
    fs.readFile('list.txt', 'utf-8', (err, data) => {
      if (err) throw err;
      // Converting the raw Buffer data to text using the toString function.
      message.channel.send('List was updated successfully! New list: \n' + data.toString());
      console.log(data);
    });
  });
};
I have a CSV that I want to save into my MySQL table. My parser works well, and it also saves the created JSON array to my table. My problem is that it inserts row by row in the background and never sends a response.
My code looks like that:
var file = './mytable.csv';
conn.connect(function (err) {});

var csv = require('csvtojson');

csv({ delimiter: "," })
  .fromFile(file)
  .on('end_parsed', function(jsonArray) {
    for (var i = 0; i < jsonArray.length; i++) {
      var post = jsonArray[i];
      conn.query('INSERT INTO mytable SET ?', post, function(err, results) {
        if (err) throw err;
        console.log(results.insertId);
      });
    }
    res.end("done");
  })
  .on('done', function(error) {
    console.log('end');
  });
My goal is for my API to send "done" (via res.json("done")) once the complete set of inserts is done. What should I change?
Greetings
Edit: my CSV is really large, almost 500k rows!
EDIT:
I inserted async into my parser like this:
csv({ delimiter:";"})
.fromFile(file)
.on('end_parsed', function(jsonArray) {
async.forEach(jsonArray, function (jsonArrays, callback) {
conn.query('INSERT INTO mytable SET ?', jsonArrays, callback);
}, function (err) {
if (err) return next(err);
res.json("done");
console.log("done")
});
});
But it doesn't respond with "done" (in the terminal it prints it, but Postman only gives me "Could not get any response").
Your call to res.end()/res.json() doesn't wait for all the inserts to finish.
And since you start your inserts within a for loop, you start them all more or less in parallel. You should take a look at something like the async library (http://caolan.github.io/async). It has an eachLimit() function that runs an async operation over a collection/array while limiting how many of those operations run in parallel, and it takes a callback that is called when an error happens or all the async calls have finished. Within that callback you can call res.json(...) to send your response.
Sample:
var async = require('async');
//...
function save_row_to_db(post, callback) {
  conn.query('INSERT INTO mytable SET ?', post, callback);
}

function finished(err) {
  if (err) throw err;
  res.end("done");
}

// Run at most 20 INSERTs in parallel; finished() fires after the last one.
async.eachLimit(csvRows, 20, save_row_to_db, finished);
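Wired into the parser from the question, that might look like this sketch (using the same jsonArray the end_parsed event delivers):

csv({ delimiter: ";" })
  .fromFile(file)
  .on('end_parsed', function(jsonArray) {
    // Respond only in the final callback, after every insert has finished.
    async.eachLimit(jsonArray, 20, save_row_to_db, finished);
  });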
So I have a CSV file containing my information, and I need to do a mass add/update.
exports.add_questions_from_file = function (file_path, surveyid, callback) {
  var U = [{}];
  fs.readFile(file_path, 'utf8', function(err, data) {
    if (err) {
      console.log(err);
      callback(err, null);
    } else {
      console.log(data);
      d = data.split(/\r\n|\n/);
      for (x = 0; x < d.length; x++) {
        line = d[x].split(',');
        if (line[0] == "") { return; }
        RQuestion.add_by_line(line, function (err, question) {
          U.push({id: question.id});
          console.log(U);
        });
      }
    }
  });
  Survey.update({_id: surveyid}, {$push: {"SurveyQuestions": U}}, function (err, numAffected, rawResponse) {
    console.log(rawResponse);
    RET = {"module": "survey", "operation": "add", "status": "OK"};
    callback(RET);
  });
};
But even though I'm using callback functions, the update always seems to happen with the same object; even the console.log here
U.push({id: question.id});
console.log(U);
prints the same object (even though all the others were created).
Am I doing something wrong?
I see a few issues.
First, for:
if (line[0] == "") {return};
Don't you mean to use a break or continue instead? Otherwise the entire function will quit if there is a blank line anywhere in the file. This is very important because Survey.update won't get called either.
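For instance, a hypothetical tweak of the loop from the question:

for (x = 0; x < d.length; x++) {
  line = d[x].split(',');
  if (line[0] === "") { continue; } // skip blank lines instead of quitting
  // ...process the line...
}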
Second, I assume that RQuestion.add_by_line and Survey.update are doing something async, like updating a database. Your code needs to be restructured to wait for those async operations to complete before moving on to the next step. I'd recommend the npm package named async for that.
fs.readFile(file_path, 'utf8', function(err, data) {
  if (err) {
    console.log(err);
    callback(err, null);
  } else {
    d = data.split(/\r\n|\n/);
    async.map(d, function(line, callback) {
      // this function is called for each line
      RQuestion.add_by_line(line.split(','), function (err, question) {
        callback(err, {id: question.id});
      });
    }, function(err, results) {
      // this function is called when all of the items are done
      console.log("done with async");
      console.dir(results);
      Survey.update({_id: surveyid}, {$push: {"SurveyQuestions": results}}, function (err, numAffected, rawResponse) {
        console.log(rawResponse);
        RET = {"module": "survey", "operation": "add", "status": "OK"};
        callback(RET);
      });
    });
  }
});
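To also cover the blank-line case from the first point, you could filter the lines before mapping. A sketch (addQuestion and allDone are hypothetical names standing in for the two functions passed to async.map above):

// Drop blank lines up front so the final callback (and Survey.update) still runs.
var nonBlank = d.filter(function(line) {
  return line.split(',')[0] !== "";
});
async.map(nonBlank, addQuestion, allDone);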
I have updated the post with the actual code.
The problem is that the Node app hangs and does not exit unless I comment out the query in addArticle. I wonder what I'm doing wrong here (in regards to the hanging problem).
function addArticle(title, text, date, link) {
  connection.query("SELECT * FROM articles WHERE link LIKE " + connection.escape(link), function(error, rows, fields) {
    if (rows.length == 0) {
      console.log("article not in database");
      console.log(connection.escape(title));
      var values = [connection.escape(title), connection.escape(text), date, connection.escape(link), '{}'];
      connection.query("INSERT INTO articles (title, text, date, link, topics) VALUES ?", [[values]], function(err) {
        if (err) throw err;
      });
    }
  });
}
function scrapeReuters() {
  var url = 'http://www.reuters.com/news/archive/technologyNews?date=10092013';
  request(url, function(err, resp, body) {
    $ = cheerio.load(body);
    links = $('a');
    $(links).each(function(i, link) {
      var addr = $(link).attr('href');
      if (addr != undefined && addr.indexOf('article') != -1 && addr.indexOf('http') == -1) {
        var full_link = "http://www.reuters.com" + addr;
        var title = $(link).text();
        request(full_link, function(err, resp, body) {
          $ = cheerio.load(body);
          para = $('p').text();
          addArticle(title, para, new Date().getTime(), full_link);
        });
      }
    });
  });
}
You probably need to close the connection after all the queries have finished. You can try using the https://github.com/caolan/async library to run the queries in sequence and then, in a master callback, close the connection.
It's a little tricky, but first you need to define an array of functions to execute. Then you run async.series(arrayOfFns, masterCallback). The master callback gets an error and the results (notice the plural; it collects the results from all the functions). In that master callback, terminate the MySQL connection and/or end the process.
To do this, I would rewrite the addArticle query to just return the query string. Then, before your $(links).each loop, I would make an array called toInsert.
In each loop iteration I would say:
toInsert.push(function(callback) {
  connection.query(addArticle(...), function(err) {
    if (err) callback(err);
    else callback(null, true);
  });
});
Then after the loop, run:
async.series(toInsert, function(err, results) {
  connection.end(); // not sure if correct
  process.exit(); // maybe, if needed?
});