Error in const bitmap = fs.readFileSync(file); - javascript

function readImageFile(file) {
    const bitmap = fs.readFileSync(file);
    const buf = new Buffer.from(bitmap);
    return buf;
}

dir.files(__dirname, function(err, files) {
    if (err) throw err;
    console.log(files);
    // we have an array of files now, so now we'll iterate that array
    files.forEach(function(filepath) {
        photo = readImageFile(filepath);
        var query = 'UPDATE cloth SET access = "?" WHERE Image = "?"'
        if (filepath.substring(35, filepath.length - 4) != "x.js") {
            values = [
                photo,
                filepath.substring(35, filepath.length - 4)
            ];
            pool.query(query, values, function(err, res) {
                if (err) throw err;
                console.log("BLOB data inserted!");

It fails with:

const bitmap = fs.readFileSync(file);
TypeError: Cannot read properties of undefined (reading 'readFileSync')
I need to duplicate this function; how do I do it? I have several tables to load the BLOB into.
I tried changing the ts values, but to no avail. I know almost no JavaScript and I need to submit a paper. Please help.

The error you are getting is because the fs module, which contains the readFileSync function, is not being imported or defined in your code.
To fix this, you need to import the fs module at the top of your code by adding this line:
const fs = require('fs');
To duplicate the function, you can simply copy and paste the entire readImageFile function and give it a different name. However, keep in mind that if you are going to use the new function in a different file, you will need to import the fs module in that file as well.
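For illustration, here is a minimal sketch of what the top of the file could look like once fs is imported; the second function is just a renamed copy (readImageFileForOtherTable is a hypothetical name), and Buffer.from is normally called without new:

// fs provides readFileSync; require it once at the top of the file.
const fs = require('fs');

// Reads an image file from disk and returns its contents as a Buffer.
function readImageFile(file) {
    const bitmap = fs.readFileSync(file);
    return Buffer.from(bitmap); // Buffer.from is normally called without `new`
}

// A duplicated copy under a different (hypothetical) name, e.g. for the
// query that targets another table. No extra import is needed as long as
// it lives in the same file.
function readImageFileForOtherTable(file) {
    const bitmap = fs.readFileSync(file);
    return Buffer.from(bitmap);
}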

Related

Making any reference to Nodejs' process.argv causes errors in unexpected place (reading a file)

I am writing code that generates a very large JSON object, saves it to a file, then loads the file and inserts the data into a Mongo collection. I want to pass a string from the command line when calling the script that I use to set the file name, as well as the collection name. I call it like so: node --max-old-space-size=8192 data_generator.js foo 1000000.
The code fails with error ENOENT: no such file or directory, open 'foo.json' on the third line of the function gen_collection() where I set the variable data. This error does not appear when a file foo.json already exists, even if it is empty. Before it fails, the code successfully creates a file foo.json but it contains only an empty array [].
The code fails with this exact same error when I include any reference to process.argv. This includes when I try to set any variable to a value from the process.argv array. The code works when I set the variables as const fname = "foo" and const size = 0. However, even if the only reference I have to process.argv is in a console.log, i.e. adding console.log(process.argv[2]) to main(), it fails with the exact same error as above.
Here is the code I am trying to run:
const { MongoClient } = require("mongodb");
const fs = require('fs');
const bjson = require('big-json');
async function main() {
const uri = "my db uri";
const client = new MongoClient(uri);
const fname = process.argv[2];
const size = parseInt(process.argv[3]);
// const fname = 'small'
// const size = 1
try {
await client.connect({ useUnifiedTopology: true });
await write_data_to_disk(fname, size);
await gen_collection(client, fname);
} catch (e) {
console.error(e);
} finally {
await client.close();
}
};
// generate data as json aray and write to local file
async function write_data_to_disk(fname, size) {
let arr = [];
for (let i = 0; i < size; i++) {
let doc = gen_document();
arr.push(doc);
}
const strStream = bjson.createStringifyStream({
body: arr
})
let logger = fs.createWriteStream(`${fname}.json`);
strStream.on('data', (d) => {
logger.write(d);
})
};
async function gen_collection(client, fname) {
let db = client.db('test');
let collection = db.collection(fname);
let data = JSON.parse(fs.readFileSync(`${fname}.json`, 'utf8')); // ERROR APPEARS ON THIS LINE
bulkUpdateOps = [];
data.forEach((doc) => {
bulkUpdateOps.push({"insertOne": {"document": doc}});
if (bulkUpdateOps.length === 1000) {
collection.bulkWrite(bulkUpdateOps);
bulkUpdateOps = [];
}
})
if (bulkUpdateOps.length > 0) {
collection.bulkWrite(bulkUpdateOps);
}
};
function gen_document() {
// returns json object
};
You're doing
await write_data_to_disk(...)
but that function doesn't return a promise that is tied to when it's actually done. So you're trying to read the resulting file BEFORE it has been created, or before it has valid content in it, hence the ENOENT error: the file doesn't yet exist when you try to read it in the following function.
Write streams do not play nicely with promises unless you wrap them in your own promise that resolves when you are completely done writing to the stream and the file has been closed.
Also, you probably want to just .pipe() strStream to the logger stream. Much easier and you can then just monitor when that pipe() operation is done to resolve the promise you wrap around that operation.
You can promisify write_data_to_disk() like this:
// generate data as json array and write to local file
function write_data_to_disk(fname, size) {
    return new Promise((resolve, reject) => {
        const arr = [];
        for (let i = 0; i < size; i++) {
            let doc = gen_document();
            arr.push(doc);
        }
        const strStream = bjson.createStringifyStream({ body: arr });
        const dest = fs.createWriteStream(`${fname}.json`, {emitClose: true});
        // monitor for completion and errors
        dest.on('error', reject).on('close', resolve);
        strStream.on('error', reject);
        // pipe all the content from strStream to the dest writeStream
        strStream.pipe(dest);
    });
}
Since this returns a promise that is truly tied to when the write operation is done, you can then use await write_data_to_disk(...).
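As a side note (an alternative sketch, not part of the original answer): on Node versions that ship the built-in stream/promises module (Node 15+), the same wrap-and-pipe pattern can be expressed with the pipeline helper, which resolves when the destination has finished and rejects on an error from either stream:

const { pipeline } = require('stream/promises');
const fs = require('fs');
const bjson = require('big-json');

// generate data as a json array and write it to a local file
async function write_data_to_disk(fname, size) {
    const arr = [];
    for (let i = 0; i < size; i++) {
        arr.push(gen_document());
    }
    const strStream = bjson.createStringifyStream({ body: arr });
    // pipeline() wires strStream into the write stream and returns a promise
    // that settles once writing is fully done (or an error occurs).
    await pipeline(strStream, fs.createWriteStream(`${fname}.json`));
}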

A strange for_of loop problem in Node.js: cannot iterate properly [duplicate]

This question already has answers here: JavaScript closure inside loops – simple practical example (44 answers). Closed 2 years ago.
I am working on a Python project and need to get some data from a Node.js API, so I am trying to write some Node.js code, which I know very little about.
My code looks like this:
const SneaksAPI = require('sneaks-api');
const sneaks = new SneaksAPI();
const fs = require('fs');

let idList = ["FY2903", "FZ5246", "FZ5421"];

for (var shoeID of idList){
    sneaks.getProductPrices(shoeID, function(err, product){
        var data = JSON.stringify(product, null, 2);
        fs.writeFile(`/Users/sam/Downloads/${shoeID}.json`, data, (err) => {
            if (err) throw err;
            console.log(`The file ${shoeID}.json has been saved!`);
        });
    });
}
It seems that something goes wrong in the loop... the output looks like this:
The file FZ5421.json has been saved!
The file FZ5421.json has been saved!
The file FZ5421.json has been saved!
Only the last element of idList is used, and when I open the JSON file I find that the content actually belongs to the second element, "FZ5246"...
I am sure there is something wrong with the code... Can anyone help me figure this out?
The real problem is the classic closure-in-a-loop issue (see the linked duplicate): var shoeID is a single binding shared by every iteration, so by the time the asynchronous getProductPrices callbacks run, they all see the last value. Switching to forEach() gives each iteration its own shoeID. I also added a rethrow in the outer callback in case it's needed (you were only rethrowing the most nested error).
const SneaksAPI = require("sneaks-api");
const sneaks = new SneaksAPI();
const fs = require("fs");
let idList = ["FY2903", "FZ5246", "FZ5421"];
idList.forEach((shoeID) => {
sneaks.getProductPrices(shoeID, function (error, product) {
if (error) throw error;
var data = JSON.stringify(product);
fs.writeFile(`/Users/sam/Downloads/${shoeID}.json`, data, (err) => {
if (err) throw err;
console.log(`The file ${shoeID}.json has been saved!`);
});
});
});
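For completeness (a sketch under the same assumptions as the code above): keeping the original for...of loop but declaring the loop variable with let would also fix it, because let creates a fresh shoeID binding for every iteration, so each asynchronous callback captures the value it was started with:

for (let shoeID of idList) {
    sneaks.getProductPrices(shoeID, function (error, product) {
        if (error) throw error;
        // `shoeID` here is the per-iteration binding, not a shared variable
        fs.writeFile(`/Users/sam/Downloads/${shoeID}.json`,
                     JSON.stringify(product, null, 2), (err) => {
            if (err) throw err;
            console.log(`The file ${shoeID}.json has been saved!`);
        });
    });
}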

Pipe NodeJS Stream to an Array

My use case is this: I am looking to read a CSV file in Node and get only the headers. I don't want to write the results of a read stream to a file; rather, I want to push the headers to an array once the file is read, so I can take that array and do something with it later on. Or, better yet, take the stream and, as it is being read, transform it, then send it to an array. file is a contrived value. I am stuck at this point, where the current output of dataFile is an empty array:
const fs = require('fs');
const parse = require('csv-parse');

const file = "my file path";
let dataFile = [];

rs = fs.createReadStream(file);
parser = parse({columns: true}, function(err, data){
    return getHeaders(data)
})

function getHeaders(file){
    return file.map(function(header){
        return dataFile.push(Object.keys(header))
    })
}
What do I need to do in order to get the results I need? I am expecting the headers to be found in an array as the end result.
OK, so there are a few confusing things in your code, and one mistake: you never actually run it :)
First, a solution: add these lines after the parser is defined:

rs.pipe(parser).on('end', function(){
    console.log(dataFile);
});
And magic, dataFile is not empty.
You stream the file from disk, pass it to the parser, then at the end, call a callback.
As for the confusing parts:
parser = parse({columns: true}, function(err, data){
    // You don't need to return anything from the callback; returning here
    // gives the impression that parser will be the result of getHeaders,
    // but it's not, it's a stream.
    return getHeaders(data)
})

function getHeaders(file){
    // Change map to forEach, with no return: map returns an array of the
    // callback's return values, so here you build an array of the results
    // of each push (which is the new length of the array).
    return file.map(function(header){
        return dataFile.push(Object.keys(header))
    })
}
And finally:
Please choose whether to end lines with ; or not, but don't mix the two ;)
You should end up with something like:
const fs = require('fs');
const parse = require('csv-parse');

const file = "./test.csv";
var dataFile = [];

rs = fs.createReadStream(file);
parser = parse({columns: true}, function(err, data){
    getHeaders(data);
});

rs.pipe(parser).on('end', function(){
    console.log(dataFile);
});

function getHeaders(file){
    file.forEach(function(header){
        dataFile.push(Object.keys(header));
    });
}
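Since every record produced with columns: true shares the same keys, a small variation (a sketch, not part of the original answer) is to read the header names off the first record only, so dataFile ends up as a single flat array of header names instead of one copy per row:

function getHeaders(records){
    // all records have the same keys, so the first one is enough
    if (records.length > 0) {
        dataFile = Object.keys(records[0]);
    }
}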

How to add many records to mongoDB from directory of JSON files?

I have about a million JSON files saved across many sub-directories of the directory "D:/njs/nodetest1/imports/source1/" and I want to import them into the collection "users" in my mongoDB database.
The following code correctly traverses the file system. As you can see, it reads each item in the directory, and if that item is a directory it reads each item in it. For each item that is not a directory, it performs some operations on it before passing the file path to a function.
function traverseFS (path){
    var files = fs.readdirSync(path);
    for (var i in files){
        var currentFile = path + '/' + files[i];
        var stats = fs.statSync(currentFile);
        if (stats.isFile())
            runOnFile(currentFile);
        else
            traverseFS(currentFile);
    }
}
traverseFS("D:/njs/nodetest1/imports/source1/")
Next, I run a few operations on each file (see below). This reads the file, parses it into a JSON object, reads two attributes of that object into variables, creates an object in the variable "entry", and passes that variable to another function.
function runOnFile(currentFile){
    var fileText = fs.readFileSync(currentFile,'utf8');
    var generatedJSON = JSON.parse(fileText);
    var recordID = generatedJSON.recordID;
    var recordText = generatedJSON.recordTexts;
    var entry = {recordID:recordID, recordText:recordText};
    insertRecord(entry);
}
The final function should then insert the data into mongoDB. I think this is where things go wrong.
function insertRecord(entry){
    var MongoClient = mongodb.MongoClient;
    var MongoURL = 'mongodb://localhost:27017/my_database_name';
    MongoClient.connect(MongoURL, function (err, db) {
        var collection = db.collection('users');
        collection.insert([entry], function (err, result) {
            db.close();
        });
    });
}
I expected this to run through the file structure, reading the JSON files into objects and then inserting those objects into my mongoDB. Instead it reads the first file into the database and then stops/hangs.
Notes:
I don't want to use mongoimport because I don't want to insert all the data from these files into my MongoDB database. However, I am not tied to any aspect of this approach; if some other solution exists, I am open to it.
This connects to the database just fine. For each item in the directory it successfully creates an "entry" object and passes it to the insertRecord function. In other words, the problem must be occurring in the insertRecord section, but it obviously could be caused by something earlier in the process.
If I add error handling, no errors are produced. I have left the error handling out of this post because it clutters the readability of the code snippets.
As per the mongodb 2.2 driver documentation (the current latest), insert is deprecated:
DEPRECATED
Use insertOne, insertMany or bulkWrite
So the short answer is probably to change collection.insert([entry], ...) to collection.insertOne(entry, ...) and you're done.
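Applied to the question's insertRecord, that minimal change could look like this (a sketch; the connect-per-record structure is left untouched here and is addressed below):

function insertRecord(entry){
    var MongoClient = mongodb.MongoClient;
    var MongoURL = 'mongodb://localhost:27017/my_database_name';
    MongoClient.connect(MongoURL, function (err, db) {
        var collection = db.collection('users');
        // insertOne takes the document itself, not an array of documents
        collection.insertOne(entry, function (err, result) {
            db.close();
        });
    });
}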
Then, for the long answer: you say you have "about a million JSON files", which typically calls for a fully async approach with the least amount of overhead.
There are two (potential) bottlenecks in the sample code:
1. fs.readFileSync, which is a blocking operation
2. connecting, inserting a record and closing the database connection
Both are executed about a million times. Granted, an import is not usually done over and over again, and (hopefully) not on a machine which needs its performance for other important tasks. Still, the sample code can easily be made more robust.
Consider using the glob module to obtain the list of JSON files.
glob('imports/**/*.json', function(error, files) {...})
This provides you with the full list of files easily in an async fashion.
Then consider connecting to the database just once, inserting everything, and closing once.
Maintaining more or less the same steps you have in the sample, I'd suggest something like:
var glob = require('glob'),
    mongodb = require('mongodb'),
    fs = require('fs'),
    MongoClient = mongodb.MongoClient,
    mongoDSN = 'mongodb://localhost:27017/my_database_name',
    collection; // moved this to the "global" scope so we can do it only once

function insertRecord(json, done) {
    var recordID = json.recordID || null,
        recordText = json.recordText || null;

    // the question implies some kind of validation/sanitation/preparation..
    if (recordID && recordText) {
        // NOTE: insert was changed to insertOne
        return collection.insertOne({recordID: recordID, recordText: recordText}, done);
    }

    done('No recordID and/or recordText');
}

function runOnFile(file, done) {
    // moved to be async
    fs.readFile(file, function(error, data) {
        if (error) {
            return done(error);
        }

        var json = JSON.parse(data);

        if (!json) {
            return done('Unable to parse JSON: ' + file);
        }

        insertRecord(json, done);
    });
}

function processFiles(files, done) {
    var next = files.length ? files.shift() : null;

    if (next) {
        return runOnFile(next, function(error) {
            if (error) {
                console.error(error);
                // you may or may not want to stop here by throwing an Error
            }

            processFiles(files, done);
        });
    }

    done();
}

MongoClient.connect(mongoDSN, function(error, db) {
    if (error) {
        throw new Error(error);
    }

    collection = db.collection('users');

    glob('imports/**/*.json', function(error, files) {
        if (error) {
            throw new Error(error);
        }

        processFiles(files, function() {
            console.log('all done');
            db.close();
        });
    });
});
NOTE: You can collect multiple "entry"-records to leverage the performance gain of multiple inserts using insertMany, though I have the feeling the inserted records are more complicated than described and it might give some memory issues if not handled correctly.
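One way such batching could look, reusing the names from the snippet above (insertMany is a real driver method; the batch array, BATCH_SIZE and flushBatch helper are illustrative, and the batch size of 1000 is arbitrary):

var batch = [];
var BATCH_SIZE = 1000;

function insertRecord(json, done) {
    var recordID = json.recordID || null,
        recordText = json.recordText || null;

    if (!recordID || !recordText) {
        return done('No recordID and/or recordText');
    }

    batch.push({recordID: recordID, recordText: recordText});

    if (batch.length >= BATCH_SIZE) {
        // flush a full batch in a single round trip
        var docs = batch;
        batch = [];
        return collection.insertMany(docs, done);
    }

    done();
}

// call this once after processFiles() finishes, before db.close()
function flushBatch(done) {
    if (!batch.length) {
        return done();
    }
    collection.insertMany(batch, done);
}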
Just structure your data into one big array of objects, then run db.collection.insertMany.
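A bare-bones sketch of that idea (the files array of paths is assumed to have been collected beforehand, e.g. by the traversal above; note that with a million files a single in-memory array of documents may not be practical):

var entries = [];
files.forEach(function (file) {
    var json = JSON.parse(fs.readFileSync(file, 'utf8'));
    entries.push({recordID: json.recordID, recordText: json.recordText});
});

// one call inserts the whole array
db.collection('users').insertMany(entries, function (err, result) {
    if (err) throw err;
    db.close();
});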
I suggest doing this using Promises:
const Bluebird = require('bluebird');
const glob = Bluebird.promisify(require('glob'));
const mongodb = require('mongodb');
const fs = Bluebird.promisifyAll(require('fs'));
const Path = require('path');

const MongoClient = mongodb.MongoClient;

const insertMillionsFromPath = Bluebird.coroutine(function *(path, mongoConnString) {
    const db = yield MongoClient.connect(mongoConnString);
    try {
        const collection = db.collection('users');
        const files = yield glob(Path.join(path, "*.json"));
        yield Bluebird.map(
            files,
            Bluebird.coroutine(function *(filename) {
                console.log("reading", filename);
                const fileContent = yield fs.readFileAsync(filename);
                const obj = JSON.parse(fileContent);
                console.log("inserting", filename);
                yield collection.insertOne(obj);
            }),
            {concurrency: 10} // You can increase concurrency here
        );
    } finally {
        yield db.close();
    }
});

insertMillionsFromPath("./myFiles", "mongodb://localhost:27017/database")
    .then(()=>console.log("OK"))
    .catch((err)=>console.log("ERROR", err));
In order to work, you will need to install the following packages:
npm install --save mongodb bluebird glob
and you will need to use Node.js version 6 or greater; otherwise you will need to transpile your JavaScript (due to the use of function *() generators).
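For reference, a rough equivalent (a sketch, not the answer's code) with native async/await, assuming Node 10+ for fs.promises and a 3.x-or-later mongodb driver where connect() resolves to a client; it processes files sequentially instead of with a concurrency of 10:

const util = require('util');
const fs = require('fs');
const Path = require('path');
const { MongoClient } = require('mongodb');
const glob = util.promisify(require('glob'));

async function insertMillionsFromPath(path, mongoConnString) {
    const client = await MongoClient.connect(mongoConnString);
    try {
        // db() with no argument uses the database named in the connection string
        const collection = client.db().collection('users');
        const files = await glob(Path.join(path, '*.json'));
        for (const filename of files) {
            // read one file, insert one document, then move on
            const fileContent = await fs.promises.readFile(filename, 'utf8');
            await collection.insertOne(JSON.parse(fileContent));
        }
    } finally {
        await client.close();
    }
}

insertMillionsFromPath("./myFiles", "mongodb://localhost:27017/database")
    .then(() => console.log("OK"))
    .catch((err) => console.log("ERROR", err));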

Read json file and append a particular array to outer scope list - javascript - Nodejs

var dir = require('node-dir');
var path = require('path');
var fs = require('fs');

var root_directory = "C:/ProgramData/XXX/YYY/Resources/LocalLibrary";

function getitems(res, fileHandler){
    dir.files(root_directory, function(err, files) {
        if (err) throw err;
        fileHandler(res, files);
    });
};

function fileHandler(res, files) {
    var finaldependency = [];
    files.forEach(function (fileName) {
        if (path.extname(fileName) === '.Item') {
            var singleObj = {};
            singleObj['key'] = fileName;
            var content = fs.readFileSync(path.resolve(fileName), "utf8");
            var json = JSON.parse(content);
            singleObj['value'] = json.References;
            finaldependency.push(singleObj);
        }
    });
    res.json(finaldependency);
}

module.exports = function(app) {
    // api ---------------------------------------------------------------------
    // get all items
    app.get('/api/item', function(req, res) {
        // use mongoose to get all items in the database
        getitems(res, fileHandler);
    });
};
Steps involved:
1. When a request is received, process all the files present in root_directory (it contains many nested subdirectories).
2. Get the list of all the files with the help of the node-dir module.
3. Pass the file list to an asynchronous fileHandler.
My question: I am doing a synchronous, blocking style of file handling (fs.readFileSync) and appending the data as key-value pairs to the outer-scope list finaldependency. Is this the right approach, or is there a more optimized way of doing this?
Your fileHandler function might take a long time, so it can block the client request. Request-processing performance will get worse as the number of client requests increases.
You can try executing the forEach loop in process.nextTick, as sketched below.
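A literal sketch of that suggestion (note that process.nextTick only defers the work; the loop is still synchronous once it runs, so splitting the work per file, e.g. with setImmediate, or switching to the asynchronous fs APIs would be needed to keep the event loop responsive):

function fileHandler(res, files) {
    process.nextTick(function () {
        var finaldependency = [];
        files.forEach(function (fileName) {
            if (path.extname(fileName) === '.Item') {
                var content = fs.readFileSync(path.resolve(fileName), "utf8");
                var json = JSON.parse(content);
                finaldependency.push({ key: fileName, value: json.References });
            }
        });
        res.json(finaldependency);
    });
}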
