I have this code:
#!/usr/bin/env node
'use strict';
const request = require('superagent');
const querystring = require('querystring');
const path = require('path');
const fs = require('fs');
const timestamp = Math.floor(Date.now() / 1000).toString();
const logdir = path.join(__dirname, '../web/public/data/');
const logfile = path.join(logdir, timestamp + '.json');
// newsapi.org api key
const NEWSAPI_KEY = process.env.NEWSAPI_KEY;
// sources endpoint
const SOURCES = 'https://newsapi.org/v1/sources?language=en';
// articles endpoint
const ARTICLE_ENDPOINT = 'https://newsapi.org/v1/articles?';
function getLatest(src){
  let sb = ['top','latest','popular'];
  for (let i in sb){
    let qs = querystring.stringify({source: src, sortBy: sb[i], apiKey: NEWSAPI_KEY});
    request
      .get(ARTICLE_ENDPOINT + qs)
      .end((err, res) => {
        if (!err){
          fs.appendFile(logfile, JSON.stringify(res.body.articles), (err) => {
          });
        }
      });
  }
}
request
  .get(SOURCES)
  .end((err, res) => {
    if (!err){
      for (var i in res.body.sources){
        getLatest(res.body.sources[i].id);
      }
    }
  });
The file that this code writes ends up looking like this:
[{...},{...},{...}][{...},{...},{...}]...
This is invalid JSON. How would I make sure that it's valid, like this?
[{...},{...},{...},{...},{...},{...}]
I've tried many things to make it work, including applying a regex to the entire file after the fact, wrapping superagent in a function that returns a promise, and editing the file afterwards in the code that relies on this script. All to no avail. I know there must be a better (proper?) way to do this.
Use a regex to find the pattern "][" and replace it with ",".
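A literal take on that suggestion would be a post-processing step over the finished file; a minimal sketch, assuming the fs module and logfile constant from the question:

const raw = fs.readFileSync(logfile, 'utf8');
// every '][' boundary marks where one appended array ends and the next begins;
// replacing it with ',' merges them into a single valid JSON array
fs.writeFileSync(logfile, raw.replace(/\]\[/g, ','));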
Rather than appending to the file with fs.appendFile, create an array before the loop, append to that and then serialise it.
function getLatest(src){
  let sb = ['top','latest','popular'];
  let reqs = sb.length;
  let allArticles = [];
  for (let i in sb){
    let qs = querystring.stringify({source: src, sortBy: sb[i], apiKey: NEWSAPI_KEY});
    request
      .get(ARTICLE_ENDPOINT + qs)
      .end((err, res) => {
        if (!err){
          allArticles.push(res.body.articles);
        }
        else{
          // an error happened, so decrement the number of responses needed for success
          reqs--;
        }
        // once the number of responses is as expected, flatten the batches and write the JSON all at once
        if (allArticles.length === reqs){
          fs.writeFile(logfile, JSON.stringify([].concat(...allArticles)), (err) => {
            if (err) console.error(err);
          });
        }
      });
  }
}
The problem was that you were serialising each individual response and appending it to the file, rather than joining the data, serialising it in one big go and then writing it to the file.
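The question also mentions wrapping superagent in a function that returns a promise; that approach works too, as long as you wait for every request before writing. A rough sketch, assuming the same NEWSAPI_KEY, ARTICLE_ENDPOINT and logfile constants from the question:

function fetchArticles(src, sortBy){
  return new Promise((resolve) => {
    const qs = querystring.stringify({source: src, sortBy, apiKey: NEWSAPI_KEY});
    request.get(ARTICLE_ENDPOINT + qs).end((err, res) => {
      // resolve with an empty list on error so one failed request doesn't lose the rest
      resolve(err ? [] : res.body.articles);
    });
  });
}

function getLatest(src){
  const sortBys = ['top', 'latest', 'popular'];
  Promise.all(sortBys.map((sortBy) => fetchArticles(src, sortBy)))
    .then((batches) => {
      // flatten the per-sortBy batches into one array and serialise it once
      const allArticles = [].concat(...batches);
      fs.writeFile(logfile, JSON.stringify(allArticles), (err) => {
        if (err) console.error(err);
      });
    });
}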
I am beginning to learn the basics of developing APIs and am following along with a YouTube video by a developer named Ania Kubow. Three JavaScript libraries are in use: ExpressJS, Cheerio and Axios. I have picked up what she is teaching and it is going well enough. I am, however, getting an error when executing the code so far. Below is the main part of the example API:
Note:The "app" variable is referring to Express JS
app.get('/news', (req, res) => {
  axios.get('https://www.theguardian.com/environment/climate-crisis')
    .then((response) => {
      // Store the html retrieved via the axios http GET request
      const html = response.data;
      // Load the retrieved html into the cheerio instance and store to $ cheerio selector
      const $ = cheerio.load(html);
      // DEBUG purpose stuff
      var loopCounter = 0;
      var climate_A_Elements = $('a:contains(climate)', html).length;
      console.log('Number of articles found: ' + climate_A_Elements);
      //
      // Get all articles that contain 'climate' in the <a /> element
      var allFoundArticles = $('a:contains("climate")', html);
      // Iterate through all found articles using cheerio forEach loop
      allFoundArticles.each(function () {
        // Assign article title and url it is located at to variables
        var title = $(this).text();
        var url = $(this).attr('href');
        // Push the previously defined vars to the previously defined array
        articlesData.push({
          title,
          url,
        });
        // Output the article details to page as response
        res.json(articlesData);
        // Add to loop count for debugging purposes and log in console
        loopCounter += 1;
        console.log('Loops: ' + loopCounter);
      }).catch(err => {
        console.log(err);
      })
    })
})
After completing one iteration of the Cheerio each loop, the application errors and gives the below error output:
node:internal/errors:484
ErrorCaptureStackTrace(err);
^
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
at new NodeError (node:internal/errors:393:5)
at ServerResponse.setHeader (node:_http_outgoing:644:11)
at ServerResponse.header (C:\Users\etomm\Downloads\Daniel\Dev\Web\climate-change-api\node_modules\express\lib\response.js:794:10)
at ServerResponse.json (C:\Users\etomm\Downloads\Daniel\Dev\Web\climate-change-api\node_modules\express\lib\response.js:275:10)
at Element.<anonymous> (C:\Users\etomm\Downloads\Daniel\Dev\Web\climate-change-api\index.js:65:21)
at LoadedCheerio.each (C:\Users\etomm\Downloads\Daniel\Dev\Web\climate-change-api\node_modules\cheerio\lib\api\traversing.js:519:26)
at C:\Users\etomm\Downloads\Daniel\Dev\Web\climate-change-api\index.js:52:30
at process.processTicksAndRejections (node:internal/process/task_queues:95:5) {
code: 'ERR_HTTP_HEADERS_SENT'
}
Node.js v18.12.1
[nodemon] app crashed - waiting for file changes before starting...
I have, of course, left a question as a comment on the video mentioned above, but realistically I don't expect to receive a response, so after trying to find the issue for a while I decided to try here for some help and guidance.
Any guidance would be greatly appreciated. If you require anything further to help, please ask.
Thank you.
I tried changing to the version of Node used in the video to see if it was a version issue, but it didn't solve anything. I tried refactoring the code and stepping through it a few times with no results, and I searched the web but couldn't find an answer to this particular issue.
Asad's answer is correct: you're sending a response for each element in the loop. The request is completed after the first iteration, so when the second iteration occurs, Express complains that you're trying to respond a second time to an already-completed response.
The minimal failing pattern is:
app.get("/news", (req, res) => {
for (let i = 0; i < 2; i++) {
res.json({message: "this won't work"});
}
});
The fix is:
app.get("/news", (req, res) => {
for (let i = 0; i < 2; i++) {
// do whatever you need to in the loop
}
// respond once after the loop is finished
res.json({message: "this works"});
});
Here's a complete, runnable example:
const app = require("express")();
const axios = require("axios");
const cheerio = require("cheerio");
app.get("/news", (req, res) => {
axios
.get("https://www.theguardian.com/environment/climate-crisis")
.then((response) => {
// Store the html retrieved via the axios http GET request
const html = response.data;
// Load the retrieved html into the cheerio instance and store to $ cheerio selector
const $ = cheerio.load(html);
// DEBUG purpose stuff
var loopCounter = 0;
var climate_A_Elements = $("a:contains(climate)", html).length;
console.log("Number of articles found: " + climate_A_Elements);
//
// Get all articles that contain 'climate' in the <a /> element
var allFoundArticles = $('a:contains("climate")', html);
// Iterate through all found articles using cheerio forEach loop
///////////////////////////
const articlesData = []; // <--------- Added
///////////////////////////
allFoundArticles
.each(function () {
// Assign article title and url it is located at to variables
var title = $(this).text();
var url = $(this).attr("href");
// Push the previously defined vars to the previously defined array
articlesData.push({
title,
url,
});
// Add to loop count for debugging purposes and log in console
loopCounter += 1;
console.log("Loops: " + loopCounter);
})
// Output the article details to page as response
//////////////////////////
res.json(articlesData); // <--------- Moved down
//////////////////////////
});
});
app.listen(3000);
Test it:
$ curl localhost:3000/news | jq 'length'
14
$ curl localhost:3000/news | jq '.[0].title'
"Australia news Australia's banks likely to reduce lending to regions and sectors at risk of climate change impacts, regulator says "
Dropping the extraneous code gives a pretty simple result:
const app = require("express")();
const axios = require("axios");
const cheerio = require("cheerio");
app.get("/news", (req, res) => {
const url = "https://www.theguardian.com/environment/climate-crisis";
axios.get(url).then(({data}) => {
const $ = cheerio.load(data);
const articles = [...$('a:contains("climate")')].map(e => ({
title: $(e).text().trim(),
url: $(e).attr("href"),
}));
res.json(articles);
})
.catch(err => {
res.status(500).json({message: err.message});
});
});
app.listen(3000);
Or with async/await:
app.get("/news", async (req, res) => {
try {
const url = "https://www.theguardian.com/environment/climate-crisis";
const {data} = await axios.get(url);
const $ = cheerio.load(data);
const articles = [...$('a:contains("climate")')].map(e => ({
title: $(e).text().trim(),
url: $(e).attr("href"),
}));
res.json(articles);
}
catch (err) {
res.status(500).json({message: err.message});
}
});
You are sending the response multiple times in a single request.
Try pushing your results into an array, then send it after the loop has executed successfully.
Try this code:
app.get('/news', (req, res) => {
  axios.get('https://www.theguardian.com/environment/climate-crisis')
    .then((response) => {
      // article data array
      const articlesData = [];
      // Store the html retrieved via the axios http GET request
      const html = response.data;
      // Load the retrieved html into the cheerio instance and store to $ cheerio selector
      const $ = cheerio.load(html);
      // DEBUG purpose stuff
      var loopCounter = 0;
      var climate_A_Elements = $('a:contains(climate)', html).length;
      console.log('Number of articles found: ' + climate_A_Elements);
      //
      // Get all articles that contain 'climate' in the <a /> element
      var allFoundArticles = $('a:contains("climate")', html);
      // Iterate through all found articles using cheerio forEach loop
      allFoundArticles.each(function () {
        // Assign article title and url it is located at to variables
        var title = $(this).text();
        var url = $(this).attr('href');
        // Push the previously defined vars to the previously defined array
        articlesData.push({
          title,
          url,
        });
        // Add to loop count for debugging purposes and log in console
        loopCounter += 1;
        console.log('Loops: ' + loopCounter);
      })
      // Output the article details to page as response
      res.json(articlesData);
    }).catch(err => {
      console.log(err);
    })
})
You should always ask yourself why you got this error. In your case it's because you put res.json inside the loop; you should only call res.json (send the response) once. The recommendations from the other answers will help you.
I had a problem with the same error.
You are trying to send a response multiple times with res.json(articlesData)
Try this:
app.get('/news', (req, res) => {
  axios.get('https://www.theguardian.com/environment/climate-crisis')
    .then((response) => {
      // Store the html retrieved via the axios http GET request
      const html = response.data;
      // Load the retrieved html into the cheerio instance and store to $ cheerio selector
      const $ = cheerio.load(html);
      // DEBUG purpose stuff
      var loopCounter = 0;
      var climate_A_Elements = $('a:contains(climate)', html).length;
      console.log('Number of articles found: ' + climate_A_Elements);
      //
      // Get all articles that contain 'climate' in the <a /> element
      var allFoundArticles = $('a:contains("climate")', html);
      // Iterate through all found articles using cheerio forEach loop
      allFoundArticles.each(function () {
        // Assign article title and url it is located at to variables
        var title = $(this).text();
        var url = $(this).attr('href');
        // Push the previously defined vars to the previously defined array
        articlesData.push({
          title,
          url,
        });
        // Add to loop count for debugging purposes and log in console
        loopCounter += 1;
        console.log('Loops: ' + loopCounter);
      });
      // Output the article details to page as response, once, after the loop
      res.json(articlesData);
    })
    .catch(err => {
      console.log(err);
    })
})
I have tried async/await and promises, however I cannot get this code to execute in order.
The code iterates through a folder of documents and parses each one before saving it to an array, then saves the array to a .json file.
However, the code continues to run before the loop finishes, which means it writes an empty file because the parsing has not been completed.
Turning it into an async function and awaiting does not solve the issue, nor does returning a promise and then using .then() to execute the final code. It still runs straight away.
const fs = require('fs');
const cheerio = require('cheerio');
const mammoth = require("mammoth");

const articleFolder = './Articles/';
var allArticles = [];

const extractDocuments = async () => {
  let files = fs.readdirSync(articleFolder);
  for(const file of files) {
    await convertToHTML(file);
  }
  completedExtraction();
}

async function convertToHTML(filename) {
  var filepath = articleFolder + filename;
  mammoth.convertToHtml({path: filepath})
    .then(function(result){
      let html = result.value; // The generated HTML
      let messages = result.messages; // Any messages, such as warnings during conversion
      updateArticles(filename, html);
    })
    .done();
}

function updateArticles (filename, html) {
  var article = {
    file: filename,
    content: parseHTML(html)
  }
  allArticles.push(article);
}

function parseHTML (html) {
  let $ = cheerio.load(html);
  let title = $('h3').first().text();
  let date = $('h3:eq(1)').text();
  $('h3').slice(0,2).remove()
  let content = $('body').html();
  let parsedArticle = {
    title: title,
    date: date,
    content: content
  }
  return parsedArticle;
}

function completedExtraction() {
  fs.writeFile('./articles.json', JSON.stringify(allArticles), (err)=>{
    if (err) throw err;
    console.log('File Written.');
  });
  console.log('Finished.');
}

extractDocuments();
To solve it with map, I would do something similar to this:

const extractDocuments = async () => {
  let files = fs.readdirSync(articleFolder);
  const articlePromises = files.map(async file => {
    const html = await convertToHTML(file);
    return {
      filename: file,
      html: html
    };
  });
  allArticles = await Promise.all(articlePromises);
  completedExtraction();
}

async function convertToHTML(filename) {
  var filepath = articleFolder + filename;
  // Return the promise chain (without .done()) so the caller can actually await the generated HTML
  return mammoth.convertToHtml({path: filepath})
    .then(function(result){
      let html = result.value; // The generated HTML
      let messages = result.messages; // Any messages, such as warnings during conversion
      return html;
    });
}
To wrap up: extractDocuments uses map to iterate over the files and create the articles, and convertToHTML only returns the generated HTML and nothing more. We no longer use updateArticles, since that work is now handled inside extractDocuments.
Hope this helps a bit and points you in the right direction.
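If you still want the parsed structure from the question (parseHTML applied to each document) rather than the raw HTML, the same map/Promise.all shape works. A rough sketch, assuming the parseHTML and completedExtraction functions defined in the question:

const extractDocuments = async () => {
  const files = fs.readdirSync(articleFolder);
  // Start every conversion, then wait for all of them before writing the file
  allArticles = await Promise.all(files.map(async (file) => ({
    file: file,
    content: parseHTML(await convertToHTML(file)),
  })));
  completedExtraction();
};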
I am writing code that generates a very large JSON object, saves it to a file, then loads the file and inserts the data into a Mongo collection. I want to pass a string from the command line when calling the script that I use to set the file name, as well as the collection name. I call it like so: node --max-old-space-size=8192 data_generator.js foo 1000000.
The code fails with error ENOENT: no such file or directory, open 'foo.json' on the third line of the function gen_collection() where I set the variable data. This error does not appear when a file foo.json already exists, even if it is empty. Before it fails, the code successfully creates a file foo.json but it contains only an empty array [].
The code fails with this same exact error whenever I include any reference to process.argv, including when I try to set any variable to a value from the process.argv array. The code works when I set the variables as const fname = "foo" and const size = 0. However, even if the only reference I have to process.argv is in a console.log, i.e. adding console.log(process.argv[2]) to main(), it fails with the exact same error as above.
Here is the code I am trying to run:
const { MongoClient } = require("mongodb");
const fs = require('fs');
const bjson = require('big-json');

async function main() {
  const uri = "my db uri";
  const client = new MongoClient(uri);
  const fname = process.argv[2];
  const size = parseInt(process.argv[3]);
  // const fname = 'small'
  // const size = 1

  try {
    await client.connect({ useUnifiedTopology: true });
    await write_data_to_disk(fname, size);
    await gen_collection(client, fname);
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
};

// generate data as json array and write to local file
async function write_data_to_disk(fname, size) {
  let arr = [];
  for (let i = 0; i < size; i++) {
    let doc = gen_document();
    arr.push(doc);
  }
  const strStream = bjson.createStringifyStream({
    body: arr
  })
  let logger = fs.createWriteStream(`${fname}.json`);
  strStream.on('data', (d) => {
    logger.write(d);
  })
};

async function gen_collection(client, fname) {
  let db = client.db('test');
  let collection = db.collection(fname);
  let data = JSON.parse(fs.readFileSync(`${fname}.json`, 'utf8')); // ERROR APPEARS ON THIS LINE
  bulkUpdateOps = [];
  data.forEach((doc) => {
    bulkUpdateOps.push({"insertOne": {"document": doc}});
    if (bulkUpdateOps.length === 1000) {
      collection.bulkWrite(bulkUpdateOps);
      bulkUpdateOps = [];
    }
  })
  if (bulkUpdateOps.length > 0) {
    collection.bulkWrite(bulkUpdateOps);
  }
};

function gen_document() {
  // returns json object
};
You're doing
await write_data_to_disk(...)
but that function doesn't return a promise that is tied to when it's done. So you're trying to read the resulting file BEFORE it has been created, or before it has valid content in it, hence the ENOENT error: the file doesn't yet exist when you try to read from it in the following function.
Writestreams do not play nicely with promises unless you wrap them in your own promise that resolves when you are completely done writing to the stream and the file has been closed.
Also, you probably want to just .pipe() strStream to the logger stream. Much easier and you can then just monitor when that pipe() operation is done to resolve the promise you wrap around that operation.
You can promisify write_data_to_disk() like this:
// generate data as json array and write to local file
function write_data_to_disk(fname, size) {
  return new Promise((resolve, reject) => {
    const arr = [];
    for (let i = 0; i < size; i++) {
      let doc = gen_document();
      arr.push(doc);
    }
    const strStream = bjson.createStringifyStream({ body: arr });
    const dest = fs.createWriteStream(`${fname}.json`, {emitClose: true});
    // monitor for completion and errors
    dest.on('error', reject).on('close', resolve);
    strStream.on('error', reject);
    // pipe all the content from strStream to the dest writeStream
    strStream.pipe(dest);
  });
}
Since this returns a promise that is truly tied to when the write operation is done, you can then use await write_data_to_disk(...).
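On newer Node versions you could also lean on the built-in stream promise helpers instead of hand-rolling the promise; a sketch under that assumption, reusing bjson and gen_document from the question:

const { pipeline } = require('stream/promises');

// generate data as a json array and stream it to a local file
async function write_data_to_disk(fname, size) {
  const arr = [];
  for (let i = 0; i < size; i++) {
    arr.push(gen_document());
  }
  const strStream = bjson.createStringifyStream({ body: arr });
  // pipeline() resolves once everything has been flushed and the destination file is closed
  await pipeline(strStream, fs.createWriteStream(`${fname}.json`));
}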
Hello friends, please help me out with coding a Dialogflow fulfillment.
Here is the code where I'm invoking a GET API with the inner request module, and I want to get the API's output into the outer function, in conv.ask('Sales is 1 million metric tonnes ' + b).
Code:
var request = require('request');
var code1 = null;
const bodyParser = require('body-parser')
const { dialogflow } = require('actions-on-google');

const assistant = dialogflow({
  clientId: "305xxxx407-rv9kocdxxxxxxxxxciouuq8f9ul2eg.apps.googleusercontent.com"
});

module.exports = (app) => {
  const logger = console;
  assistant.intent('Sales', (conv) => {
    const baseurl = 'https://www.ixxxxxxt.in:3500/getunits?unitcode=4';
    var a = request(baseurl, function(error, res, body) {
      var Unit = JSON.parse(body);
      if (!error && res.statusCode == 200) {
        var code = JSON.stringify(Unit.description);
        //res.render(test(Unit));
        console.log(code); // Print the google web page.
      }
    })
    var b = (a.code);
    console.log(b);
    conv.ask('Sales is 1 million metric tonnes ' + b);
  })
You have a few issues here.
The first is understanding what request() is doing. You probably don't want what request() returns, but instead want access to the body, which you get from the function you define.
That function is actually the second parameter that you've passed to request(). It is referred to as the callback function, since when request() gets the data from the URL, it will call that function. So everything you want to do with body needs to be done inside the callback function.
However, since you're using the Dialogflow library, and this is being done inside an Intent Handler, you need to return a Promise to indicate that you're waiting for a result before it can reply to the user. While you can wrap request() in a Promise, there are better solutions, most notably using the request-promise-native package, which is very similar to the request package, but uses Promises.
This makes things a lot easier. Your code might look something more like this (untested):
var request = require('request-promise-native');
var code1 = null;
const { dialogflow } = require('actions-on-google');

const assistant = dialogflow({
  clientId: "305xxxx407-rv9kocdxxxxxxxxxciouuq8f9ul2eg.apps.googleusercontent.com"
});

module.exports = (app) => {
  const logger = console;
  assistant.intent('Sales', (conv) => {
    const baseurl = 'https://www.ixxxxxxt.in:3500/getunits?unitcode=4';
    // json: true tells request-promise-native to parse the response body for us
    return request(baseurl, { json: true })
      .then( body => {
        // You don't need the body parser anymore
        let code = body.description;
        conv.ask('Sales is 1 million metric tonnes ' + code);
      })
      .catch( err => {
        console.error( err );
        conv.ask('Something went wrong. What should I do now?');
      });
  });
};
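If you prefer async/await, the same handler could look something like this (also untested); actions-on-google accepts an async handler as long as the returned promise resolves after conv.ask has been called:

assistant.intent('Sales', async (conv) => {
  const baseurl = 'https://www.ixxxxxxt.in:3500/getunits?unitcode=4';
  try {
    // await the parsed body instead of nesting a callback
    const body = await request(baseurl, { json: true });
    conv.ask('Sales is 1 million metric tonnes ' + body.description);
  } catch (err) {
    console.error(err);
    conv.ask('Something went wrong. What should I do now?');
  }
});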
I'm looking for an efficient way to take a raw sql file and have it executed synchronously against a postgres database, akin to if you ran it through psql.
I have an sql file which creates all databases, imports data, etc. I need to execute this using node.js but cannot find any module which does this automatically. For the node.js application itself, we use node-postgres ('pg'), knex.js and bookshelf.js. I assume though that pg is best for this.
One alternative I can think of is to read the full file, split it by semicolons, replace newlines with spaces, collapse any duplicate whitespace, then feed the statements into pg one by one so that they're executed sequentially, not asynchronously. I'd be a little surprised if that were truly the most efficient way, and also if no libraries existed yet to solve this. I'm a little hesitant to jump into it, seeing as SQL syntax can itself be a little challenging and I might accidentally mangle it.
Some clarifications in advance:
psql cannot be used as it's not installed on the target machine
I've chosen to develop and source control sql statements in sql native form, because it's a lot easier for a DBA to use and manipulate it
You can just separate consecutive queries with a semicolon when they are passed to client.query.
This works:
var pg = require('pg');

pg.connect('postgres://test:test@localhost/test', function(err, client, done){
  client.query('CREATE TABLE test (test VARCHAR(255)); INSERT INTO test VALUES(\'test\') ');
  done();
});
And consequently, that works too:
var pg = require('pg');
var fs = require('fs');

var sql = fs.readFileSync('init_database.sql').toString();

pg.connect('postgres://test:test@localhost/test', function(err, client, done){
  if(err){
    console.log('error: ', err);
    process.exit(1);
  }
  client.query(sql, function(err, result){
    done();
    if(err){
      console.log('error: ', err);
      process.exit(1);
    }
    process.exit(0);
  });
});
I've written the following function which works for my case. It would have been much simpler if it weren't for:
Using batch to manage concurrency
Having the tricky PostgreSQL COPY case to consider
Code snippet:
function processSQLFile(fileName) {
  // Extract SQL queries from files. Assumes no ';' in the fileNames
  var queries = fs.readFileSync(fileName).toString()
    .replace(/(\r\n|\n|\r)/gm," ") // remove newlines
    .replace(/\s+/g, ' ') // excess white space
    .split(";") // split into all statements
    .map(Function.prototype.call, String.prototype.trim)
    .filter(function(el) {return el.length != 0}); // remove any empty ones

  // Execute each SQL query sequentially
  queries.forEach(function(query) {
    batch.push(function(done) {
      if (query.indexOf("COPY") === 0) { // COPY - needs special treatment
        var regexp = /COPY\ (.*)\ FROM\ (.*)\ DELIMITERS/gmi;
        var matches = regexp.exec(query);
        var table = matches[1];
        var fileName = matches[2];
        var copyString = "COPY " + table + " FROM STDIN DELIMITERS ',' CSV HEADER";
        var stream = client.copyFrom(copyString);
        stream.on('close', function () {
          done();
        });
        var csvFile = __dirname + '/' + fileName;
        var str = fs.readFileSync(csvFile);
        stream.write(str);
        stream.end();
      } else { // Other queries don't need special treatment
        client.query(query, function(result) {
          done();
        });
      }
    });
  });
}
Beware that this would fail if you used semicolons anywhere except to terminate SQL statements.
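For context, the batch and client objects in that snippet come from the author's surrounding setup, which isn't shown. A rough sketch of what that setup might look like, assuming the visionmedia batch package and node-postgres with a hypothetical connection string:

var pg = require('pg');
var fs = require('fs');
var Batch = require('batch');

var client = new pg.Client('postgres://user:pass@localhost/mydb'); // hypothetical connection string
var batch = new Batch();
batch.concurrency(1); // run the queued statements one at a time, in order

client.connect(function (err) {
  if (err) throw err;
  processSQLFile('init_database.sql'); // queues one function per statement onto the batch
  batch.end(function (err) {           // fires once every queued function has called done()
    if (err) throw err;
    client.end();
  });
});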
The @databases/pg client supports running SQL files out of the box:
const createPool = require('@databases/pg');
const {sql} = require('@databases/pg');

const db = createPool();

db.query(sql.file('my-file.sql')).catch(ex => {
  console.error(ex);
  process.exitCode = 1;
}).then(() => db.dispose());
It also supports having multiple statements in a single call to db.query:
const createPool = require('@databases/pg');
const {sql} = require('@databases/pg');

const db = createPool();

db.query(sql`
INSERT INTO users (name) VALUES (${'Forbes'});
SELECT * FROM users;
`).then(
  results => console.log(results)
).catch(ex => {
  console.error(ex);
  process.exitCode = 1;
}).then(() => db.dispose());
In this example, each statement is run in sequence, and the result of the last statement is returned.
The following, which just reads a file into a string and runs it using query, seems to work for me:
const { Pool } = require("pg");
const pool = new Pool({ host, port, user, password, database });
dbClient = await pool.connect();
var sql = fs.readFileSync("/path/to/file.sql", "utf8");
await dbClient.query(sql);
In case it also helps, here is further code to run all "*.sql" files in a directory in alphabetical order:
const pathWithSqlFiles = "/path/to/sqldir";
const filenames = fs
  .readdirSync(pathWithSqlFiles, { withFileTypes: true })
  .filter((item) => !item.isDirectory() && item.name.toLowerCase().endsWith(".sql"))
  .map((item) => item.name);

for (const filename of filenames) {
  var sql = fs.readFileSync(`${pathWithSqlFiles}/${filename}`, "utf8");
  await dbClient.query(sql);
}
(Don't forget to release the checked-out client back to the pool at some point after this using dbClient.release(), or shut the whole pool down with await pool.end().)
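A minimal sketch of that cleanup, reusing the pool and dbClient from above:

// hand the checked-out client back to the pool, then close every idle connection
dbClient.release();
await pool.end();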
There are many ways to import a database from an SQL file. The simplest and fastest is to just run this command in your cmd, in the directory where your file is saved:
psql -h localhost -U postgres -d myDataBase -a -f myFile.sql
Or you can read and parse the file through Node.js and run it, as in the processSQLFile function shown earlier, but that takes more work.