storing files in mongodb using gridfs and mongoose - javascript

I have the following code.
gridfs.js has the code below, which writes the file bytes to GridFS:
exports.putFile = function(path, name, options, fn) {
  var db;
  db = mongoose.connection.db;
  options = parse(options);
  options.metadata.filename = name;
  return new GridStore(db, name, "w", options).open(function(err, file) {
    if (err) {
      return fn(err);
    }
    return file.writeFile(path, fn);
  });
};
The Mongoose schema is defined below. It has a document name and the files themselves.
var fs = require('fs');
var mongoose = require('mongoose');
var db = mongoose.connection;
var gridfs = require('./gridfs');
var Schema = mongoose.Schema;

var trackDocsSchema = mongoose.Schema(
  {
    _id : Schema.Types.ObjectId,
    docName: 'string',
    files: [ mongoose.Schema.Mixed ]
  }
);

trackDocsSchema.methods.addFile = function(file, options, fn) {
  var trackDocs;
  trackDocs = this;
  return gridfs.putFile(file.path, file.name, options, function(err, result) {
    if (err) console.log("postDocsModel TrackDocs Error: " + err);
    trackDocs.files.push(result);
    return trackDocs.save(fn);
  });
};

var TrackDocs = mongoose.model("TrackDocs", trackDocsSchema);
The server code that is invoked is below.
exports.uploadFile = function (req, res) {
  console.log("uploadFile invoked");
  var trackDocs, opts;
  trackDocs = new TrackDocs();
  trackDocs._id = mongoose.Types.ObjectId(req.body._id);
  trackDocs.docName = req.body.docName;
  opts = {
    content_type: req.files.file.type
  };
  return trackDocs.addFile(req.files.file, opts, function (err, result) {
    if (err) console.log("api TrackDocs Error: " + err);
    console.log("Result: " + result);
    trackDocs.save();
    console.log("Inserted Doc Id: " + trackDocs._id);
    return res.json(true);
  });
};
When I run the above code, I get an error:
api TrackDocs Error: RangeError: Maximum call stack size exceeded
Nothing is added to GridFS. However, if I upload the same file again, it does save it into GridFS.
Notice there are two trackDocs.save() calls. I am not sure if that is causing the issue or something else.
I am new to NodeJS and Mongoose, so any assistance will be much appreciated.
Thanks,
Melroy

Not sure whether this helps or not, but it has helped me in the past.
Not surprisingly, different browsers have different call stack sizes, and the method they use to determine the call stack varies as well. The exact sizes differ from browser to browser (give or take, they might be off by one or two frames).
We ran some tests ourselves in order to get the call stack sizes on other browsers/hosts/OSs using this simple code:
var i = 0;
function recurse() {
  i++;
  recurse();
}
try {
  recurse();
} catch (ex) {
  alert('i = ' + i + '\nerror: ' + ex);
}
http://javascriptrules.com/2009/06/30/limitation-on-call-stacks/
That said, one option could be passing --max-stack-size to node.
Set max v8 stack size (bytes)
node --max-stack-size 32000 app.js
NOTE: the help prints it as --max-stack-size, but in Node v0.10.x+ you need to use --stack-size instead.
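For example, with the newer flag name (a sketch; the value is only an illustration):
node --stack-size=32000 app.js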

Related

need to do a file system at particular file location in nodejs

I am trying to do a zip conversion and need to save the zip in a particular folder, a zip_folder created with my project folder. This happens when I call an API. I can't get it to work that way, but if I use __dirname it works, although it creates the file in the project folder itself. Can anyone help me out with suggestions? Thank you.
const fs = require('fs');
const archiver = require('archiver');

var file1 = '../zip_folder/scorm.zip';
var onlyPath = require('path').dirname('C:\\Users\\is9115\\Desktop\\node_moodle');
const mysql = require('../shared/connection');

// create a file to stream archive data to.
const archive = archiver('zip', {
  zlib: { level: 9 } // Sets the compression level.
});

async function createzip() {
  const output = fs.createWriteStream(file1); // this is not working at the file location
  // const output = fs.createWriteStream(__dirname + '/scorm.zip'); // working, but creating the zip at the root folder itself

  fs.readFile('imsmanifest.xml', 'utf-8', async function(err, data) {
    if (err) throw err;
    var newValue = data.replace(/Var_Title/g, 'Golf');
    fs.writeFile('imsmanifest1.xml', newValue, 'utf-8', function(err, data) {
      if (err) throw err;
      console.log('Done!');
    })
  })

  archive.pipe(output);
  const file2 = __dirname + '/imsmanifest1.xml';
  archive.append(fs.createReadStream(file2), { name: 'imsmanifest.xml' });
  archive.append('string cheese!', { name: 'file2.txt' });
  archive.directory('scorm12schemadefinition/', false);
  archive.file('imsmainfest1.xml', { name: 'imsmanifest.xml' });
  archive.finalize();
}
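One way to make the output path independent of the process working directory is to resolve it against __dirname. A rough sketch (the exact location of zip_folder relative to this file is an assumption based on the description above):
const fs = require('fs');
const path = require('path');

// Resolve zip_folder relative to this file rather than the current working directory.
// Assumes zip_folder sits one level above the directory containing this script.
const zipPath = path.resolve(__dirname, '..', 'zip_folder', 'scorm.zip');

// Make sure the target folder exists before opening the write stream.
fs.mkdirSync(path.dirname(zipPath), { recursive: true });
const output = fs.createWriteStream(zipPath);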

How to solve timeout problem with electron-html-to node.js

I'm experiencing this timeout when trying to use electron to convert an html file to pdf. I'm running this js app through node.
{ Error: Worker Timeout, the worker process does not respond after 10000 ms
    at Timeout._onTimeout (C:\Users\Owner\Desktop\code\PDF-Profile-Generator\node_modules\electron-workers\lib\ElectronManager.js:377:21)
    at ontimeout (timers.js:436:11)
    at tryOnTimeout (timers.js:300:5)
    at listOnTimeout (timers.js:263:5)
    at Timer.processTimers (timers.js:223:10)
  workerTimeout: true,
  message: 'Worker Timeout, the worker process does not respond after 10000 ms',
  electronTimeout: true }
I do not know much about Electron and have not been able to do much to debug it. The JS code is meant to generate an HTML file based on user input, pulling from a GitHub profile. Then, that HTML file needs to be converted to a PDF file.
My js code is as follows:
const fs = require("fs")
const convertapi = require('convertapi')('tTi0uXTS08ennqBS');
const path = require("path");
const generate = require("./generateHTML");
const inquirer = require("inquirer");
const axios = require("axios");

const questions = ["What is your Github user name?", "Pick your favorite color?"];

function writeToFile(fileName, data) {
  return fs.writeFileSync(path.join(process.cwd(), fileName), data);
};

function promptUser() {
  return inquirer.prompt([
    {
      type: "input",
      name: "username",
      message: questions[0]
    },
    {
      type: "list",
      name: "colorchoice",
      choices: ["green", "blue", "pink", "red"],
      message: questions[1]
    }
  ])
};

function init() {
  promptUser()
    .then(function ({ username, colorchoice }) {
      const color = colorchoice;
      const queryUrl = `https://api.github.com/users/${username}`;
      let html;
      axios
        .get(queryUrl)
        .then(function (res) {
          res.data.color = color
          const starArray = res.data.starred_url.split(",")
          res.data.stars = starArray.length
          console.log(res)
          html = generate(res.data);
          console.log(html)
          writeToFile("profile.html", html)
        })

      var convertFactory = require('electron-html-to');
      var conversion = convertFactory({
        converterPath: convertFactory.converters.PDF
      });

      conversion({ file: './profile.html' }, function (err, result) {
        if (err) {
          return console.error(err);
        }
        console.log(result.numberOfPages);
        console.log(result.logs);
        result.stream.pipe(fs.createWriteStream(__dirname + '/profile.pdf'));
        conversion.kill(); // necessary if you use the electron-server strategy, see below for details
      });

      // convertapi.convert('pdf', { File: './profile.html' })
      //   .then(function (result) {
      //     // get converted file url
      //     console.log("Converted file url: " + result.file.url);
      //     // save to file
      //     return result.file.save(__dirname + "/profile.pdf");
      //   })
      //   .then(function (file) {
      //     console.log("File saved: " + file);
      //   });
    })
}
init();
I had a similar problem. I had installed multiple versions of Electron (electron, electron-html-to, electron-prebuilt), and the problem was resolved when I deleted the older versions in package.json so only one was left. The assumption is that they were interfering with each other.
So check the installed versions of Electron, because the problem might be there rather than in your code.
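For example, listing what is actually installed makes duplicates easy to spot (the package names here are the ones mentioned above):
npm ls electron electron-prebuilt electron-html-to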

Adding Data to Event Emitter

I am using the "proxy-lists": "^1.16.0" package to obtain proxies.
I would like to save all the incoming arrays of proxy objects into my own array and later save it to the db.
When running the below example my array is empty and no file is written:
const ProxyLists = require('proxy-lists');
const fs = require('fs');

global.__basedir = __dirname;

const options = {
  countries: null
};

// `gettingProxies` is an event emitter object.
const gettingProxies = ProxyLists.getProxies(options);

const data = []

gettingProxies.on('data', function (proxies) {
  console.log(proxies);
  data.push(proxies)
});

gettingProxies.on('error', function (error) {
  console.error(error);
});

gettingProxies.once('end', function () {
  fs.writeFile(__basedir + "data/file.txt", data, function (err) {
    if (err) {
      return console.log(err);
    }
    console.log("The file was saved!");
  });
});
Any suggestions what I am doing wrong?
I appreciate your replies!
Looks good to me, but when I tested locally I saw that there was an issue concatenating your paths. Try __basedir + "/data/file.txt" (or use path.join).
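A rough sketch of the end handler using path.join (JSON.stringify is added here because fs.writeFile expects a string or Buffer rather than an array):
const path = require('path');

gettingProxies.once('end', function () {
  // path.join supplies the separator that was missing from __basedir + "data/file.txt"
  const target = path.join(__basedir, 'data', 'file.txt');
  fs.writeFile(target, JSON.stringify(data, null, 2), function (err) {
    if (err) {
      return console.log(err);
    }
    console.log("The file was saved!");
  });
});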

Using promises on Node JS

I'm trying to use a promise in this code:
// Listing files
app.post('/readList', function(req, res) {
  var cleared = false
  var readList = new Promise(function(resolve, reject){
    fs.readdir(req.body.path, (err, files) => {
      files.forEach(file => {
        console.log(file)
        var fileDetail = {
          name: '',
          local: true,
          filetype: 'fas fa-folder-open',
          filepath: '',
          isFile: false
        }
        if(!cleared){
          listedFiles = []
          cleared = true
        }
        fileDetail.name = file
        fileDetail.filepath = req.body.path + file
        fs.stat(req.body.path + file, function(err, stats) {
          fileDetail.isFile = stats.isFile()
          if(stats.isFile()) fileDetail.filetype = 'far fa-file-alt'
          else fileDetail.filetype = 'fas fa-folder-open'
        })
        listedFiles.push(fileDetail)
      })
    })
  })
  readList.then(
    console.log('vorta'),
    res.end(JSON.stringify(listedFiles))
  )
})
I've put this line in to show the items being listed:
console.log(file)
And added this line to run after the promise:
readList.then(
  console.log('vorta'),
  res.end(JSON.stringify(listedFiles))
)
I don't know where the mistake is, but the console is showing 'vorta' before the file names.
What am I doing wrong?
Here you're passing two params:
readList.then(
  // #1 In this case you're executing the log function, which causes the message to be printed.
  console.log('vorta'),
  res.end(JSON.stringify(listedFiles)) // #2
)
So, you need to pass a function
readList.then(function() {
  console.log('vorta');
  res.end(JSON.stringify(listedFiles));
})
Further, you need to call the resolve function within the async logic.
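A minimal sketch of both points together (error handling kept simple, per-file details omitted):
var readList = new Promise(function (resolve, reject) {
  fs.readdir(req.body.path, function (err, files) {
    if (err) return reject(err);
    // build listedFiles from `files` here, then hand the result to the promise
    resolve(files);
  });
});

readList.then(function (files) {
  console.log('vorta');
  res.end(JSON.stringify(files));
});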
As I said in my earlier comment, there are at least four problems here:
You aren't calling resolve(listedFiles) to resolve the promise so its .then() handler is never called
You need to pass a single function to .then()
You have no error handling for your async operations
You seem to be assuming that fs.stat() is synchronous when it is not
The best way to attack this problem is to promisify all your asynchronous functions and then use promises for controlling the flow and the error handling. Here's a way to fix all of these issues:
const util = require('util');
const fs = require('fs');
const readdirAsync = util.promisify(fs.readdir);
const statAsync = util.promisify(fs.stat);

// Listing files
app.post('/readList', function(req, res) {
  // add code here to sanitize req.body.path so it can only
  // point to a specific sub-directory that is intended for public consumption
  readdirAsync(req.body.path).then(files => {
    return Promise.all(files.map(file => {
      let fileDetail = {
        name: file,
        local: true,
        filepath: req.body.path + file
      };
      return statAsync(fileDetail.filepath).then(stats => {
        fileDetail.isFile = stats.isFile();
        fileDetail.filetype = fileDetail.isFile ? 'far fa-file-alt' : 'fas fa-folder-open';
        return fileDetail;
      });
    }));
  }).then(listedFiles => {
    res.json(listedFiles);
  }).catch(err => {
    console.log(err);
    res.sendStatus(500);
  });
});
FYI, this is kind of a dangerous implementation because it lists files on ANY path that the user passes in so any outsider can see the entire file listing on your server's hard drive. It could even list network attached drives.
You should be limiting the scope of req.body.path to only a specific file hierarchy that is intended for public consumption.
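One way to enforce that, as a rough sketch (the public-files directory name is hypothetical):
const path = require('path');

// Only allow listings under this directory.
const PUBLIC_ROOT = path.resolve(__dirname, 'public-files');

function resolvePublicPath(requestedPath) {
  // Resolve against the public root and reject anything that escapes it
  // (e.g. via "../" segments or an absolute path).
  const resolved = path.resolve(PUBLIC_ROOT, requestedPath);
  if (resolved !== PUBLIC_ROOT && !resolved.startsWith(PUBLIC_ROOT + path.sep)) {
    return null;
  }
  return resolved;
}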
You need to pass a function to then.
As it stands, you are calling log and end immediately and passing their return values.
Here is a working copy of your code. I have made a few changes, which you can omit since they are only there to give you working code:
var express = require('express');
var fs = require('fs');
var app = express();

app.post('/readList', function(req, res) {
  // Assuming sample data coming in req.body
  // Remove this when you run it on your side
  req.body = {
    path: 'temp_dir'
  };
  var cleared = false;
  var listedFiles = [];

  // Promising readList :)
  function readList() {
    return new Promise(function (resolve, reject) {
      // Suppose req.body.path is 'temp_dir' and it has 2 files
      fs.readdir(req.body.path, (err, files) => {
        console.log(files);
        // in the following commented code you can do what you need
        /*files.forEach(file => {
          console.log(file);
          var fileDetail = {
            name: '',
            local: true,
            filetype: 'fas fa-folder-open',
            filepath: '',
            isFile: false
          }
          if (!cleared) {
            listedFiles = [];
            cleared = true;
          }
          // really? you can think of it later!!
          fileDetail.name = file;
          fileDetail.filepath = req.body.path + file
          // I changed below to avoid surprises for you in data!
          const stats = fs.statSync(req.body.path + file);
          fileDetail.isFile = stats.isFile();
          if (stats.isFile())
            fileDetail.filetype = 'far fa-file-alt';
          else
            fileDetail.filetype = 'fas fa-folder-open'
          listedFiles.push(fileDetail);
        });*/
        resolve(listedFiles);
      });
    });
  }

  readList().then((data) => {
    console.log('vorta');
    // data here will contain the same data as listedFiles, so choose your approach; I would recommend going with data
    res.send(JSON.stringify(listedFiles)); // fine
    // res.send(JSON.stringify(data)); // better and recommended
  });
})

app.listen(process.env.PORT || 3000);
console.log('Listening on port 3000');

What is the correct way of passing a Mongoose object into the MongoDB underlying connection insert() method

I need to insert many thousands of documents into MongoDB. I want to use Mongoose for its casting properties, etc. However I cannot figure out how to pass the generated instances to the MongoDB connection. I have tried this:
var fs = require('fs');
var mongoose = require('mongoose');

var config = JSON.parse(fs.readFileSync("./config.json"));
mongoose.connect(config.mongoDBUrl);
db = mongoose.connection;

db.once('open', function () {
  var TestSchema = new mongoose.Schema({
    testStr : String
  });
  var Model = mongoose.model('test_schema_2', TestSchema);
  var inst = new Model();
  inst.testStr = "EWAFWEFAW";

  // This works.
  db.collection('test_schema_2').insert({ testStr : 'My Test Str'}, {}, function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log('Written.');
      db.close();
    }
  });

  // This doesn't.
  db.collection('test_schema_2').insert(inst, {}, function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log('Written.');
      db.close();
    }
  });
});
In the second case, I get: "[RangeError: Maximum call stack size exceeded]"
What is Mongoose breaking behind the scenes that stops this from working, and how can I make it work?
To save an instance of a model, you just have to do:
inst.save(function(err) {
  // Do something here
});
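Since the question mentions inserting many thousands of documents, two related options may help, sketched below: Model.insertMany (available in newer Mongoose versions) batches inserts while keeping Mongoose casting, and inst.toObject() converts a Mongoose document into a plain object that the driver-level insert can serialize, which avoids the internal document state that appears to trigger the RangeError.
// Bulk insert with Mongoose casting/validation (the docs array here is only illustrative):
Model.insertMany([{ testStr: 'a' }, { testStr: 'b' }], function (err, docs) {
  if (err) return console.log(err);
  console.log('Inserted ' + docs.length + ' documents');
});

// Or convert a single Mongoose document to a plain object before using the driver directly:
db.collection('test_schema_2').insert(inst.toObject(), {}, function (err) {
  if (err) return console.log(err);
  console.log('Written.');
});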
