Can ShardingTest() pass non-default dbpath to mongod?

I'm running mongodb 3.0.6 (homebrew) on OS X 10.11/15A263e and can't figure my way through the js code for ShardingTest to specify a non-default data file path.
I want to avoid messing with symlinks or actual data files in /data for reasons, but more importantly I want to learn how to get the code below that calls ShardingTest to work:
manoa:dev mike$ mongo --nodb
MongoDB shell version: 3.0.6
> var options = { shardOptions : { dbpath : "/usr/local/var/mongodb" } };
> cluster = new ShardingTest( { shards : 3 }, { other : options } );
Resetting db path '/data/db/test0'
2015-08-31T07:54:03.707-0500 E QUERY Error: boost::filesystem::create_directory: No such file or directory: "/data/db/test0"
at Function.MongoRunner.runMongod (src/mongo/shell/servers.js:589:13)
at new ShardingTest (src/mongo/shell/shardingtest.js:259:36)
at (shell):1:11 at src/mongo/shell/servers.js:589
I looked over this helpful post, but my JS is too weak to generalize from the help there to figure out how dbpath is set and which arguments will override the default path.
Could someone propose a way to debug this script or provide the actual syntax to feed my preferred db path to the ShardingTest function?

MongoRunner.dataPath is used to construct db paths:
this.getDBPaths = function() {
    return _alldbpaths.map((path) => {
        return MongoRunner.dataPath + path;
    });
};
So you might set it to whatever directory you want, e.g.:
// don't forget the trailing slash
MongoRunner.dataPath = '/home/your_user/data/db/'
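For example, a minimal sketch (assuming a mongo --nodb shell and that the target directory already exists and is writable):
// Run in a `mongo --nodb` shell; the path is just an example directory.
MongoRunner.dataPath = "/usr/local/var/mongodb/"; // trailing slash matters
MongoRunner.dataDir = "/usr/local/var/mongodb";   // some shell versions also consult dataDir
// The per-shard directories (test0, test1, ...) should now be created under the new path.
var cluster = new ShardingTest({ shards: 3 });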

Mongoose: Unable To Save Data To Empty Object

Here is my situation:
I have a simple Schema that has an object called commands which I originally set to {}.
commands:{}
When I went to add a document to the database, I looped through a file of commands and added them to the object by doing command["command"] = "blah blah";. The way I did that is as follows (the code I wrote is quite old):
Guild.findOne({guildId: serverId}).then(x => {
    x.commands = {};
    for (let command of Object.values(commandInfo)) {
        if (!command.name) continue;
        const getSubs = () => {
            const subArray = {};
            if (!command.subCommands) return;
            for (let subs of Object.values(command.subCommands)) {
                subArray[subs.name] = {
                    access: subs.access,
                    disabled: subs.disabled
                }
            }
            x.commands[command.name].subCommands = subArray;
        }
        x.commands[command.name] = {
            access: command.access,
            bounded: command.bounded,
            disabled: command.disabled,
        }
        getSubs();
    }
    console.log("Success!");
    x.save();
});
The commands contain the following (I'll use two commands for my example):
work: {
    access: "All",
    bounded: "A Channel",
    disabled: false
},
profile: {
    access: "All",
    bounded: "Another Channel",
    disabled: false,
    subCommands: {
        bio: {
            access: "All",
            disabled: true
        },
        register: {
            access: "All",
            disabled: false
        }
    }
}
Fast-forwarding to now, I made a config file that is supposed to edit these commands. The code I wrote works perfectly fine; however, Mongoose is unable to save any of the changes to the commands. Everything else in my Schema works in the config file except anything related to commands.
I know what the problem is: Mongoose can't find the command in the Schema and gets confused somehow. I know I could fix it by adding all of the commands and their properties to the Schema, but that is tedious and is not a clean solution when I add new commands. Plus, I have all of the information for the commands in a separate file, so I don't want to rewrite what I already wrote.
My question is: is there a workaround to my problem? I tried setting commands to Schema.Types.Mixed, but that didn't fix it. I also searched all around for something similar but could not find anything.
Here is the full Schema as requested:
const {Schema, model} = require("mongoose");
const settings = require("../utils/settings");
const guildSchema = Schema({
    guildId: String,
    symbols: {
        prefix: {type: String, default: ";"},
        currency: {type: String, default: "$"}
    },
    channels: {
        casinoChannels: Array,
        fineChannels: Array
    },
    commands: Schema.Types.Mixed,
    hierarchy: [{
        name: String,
        role: {type: String, default: ""},
        members: Array
    }],
    users: [{
        userId: String,
        cash: Number,
        bank: Number,
        items: [{
            name: String,
            amount: Number
        }],
        timers: {
            work: String,
            crime: String,
            hack: String,
            steal: String,
            daily: String,
            weekly: String,
            marry: String,
            divorce: String,
            idea: String,
            report: String,
            quiz: String
        },
        fine: {
            fineId: String,
            description: String,
            report: String,
            pay: Number
        },
        used: String
    }],
});
guildSchema.virtual("addUser").set(function(id){
    const user = {
        userId: id,
        cash: settings.cash,
        bank: settings.bank
    };
    this.users.push(user);
    return user;
});
module.exports = model("Servers", guildSchema);
Use findOneAndUpdate to update the existing document in the collection.
Let the object that you want to store in "commands" in MongoDB be in the x variable.
Guild.findOneAndUpdate({guildId: serverId}, {commands: x});
When a match is found, the existing "commands" value will be replaced with the object stored in the x variable.
UPDATE TO NOT COMPLETELY REPLACE EXISTING OBJECT
let guild = await Guild.findOne({guildId: serverId}); // await needed (inside an async function) so guild is the document, not a pending query
if (guild) {
    // guild.commands here will have the existing object.
    // Modify it as required and then use findOneAndUpdate again.
}
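For example, a sketch of that merge-then-update flow (the work command and the new value are just illustrative; serverId is assumed to be in scope):
Guild.findOne({guildId: serverId}).then(guild => {
    if (!guild) return;
    // Change only what you need; the rest of the stored commands object is kept.
    guild.commands.work.disabled = true;
    return Guild.findOneAndUpdate({guildId: serverId}, {commands: guild.commands});
});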
The solution is quite simple. I took it from this site: https://mongoosejs.com/docs/2.7.x/docs/schematypes.html
To give credit:
Since it is a schema-less type, you can change the value to anything else you like, but Mongoose loses the ability to auto detect/save those changes. To "tell" Mongoose that the value of a Mixed type has changed, call the .markModified(path) method of the document, passing the path to the Mixed type you just changed.
So, whenever I update the commands (i.e., whenever I change the access, bounded, or disabled variables), I use the markModified method. My code is:
guildSchema.virtual("saveCommands").set(function(name){
if(["access", "bounded", "disabled"].includes(name)) this.markModified('commands');
});
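As an illustration, a minimal sketch of the save path (commandName, field, and value are hypothetical names coming from the config file; guild is a loaded document):
guild.commands[commandName][field] = value;   // e.g. guild.commands.work.disabled = true
guild.markModified("commands");               // tell Mongoose the Mixed path changed
guild.save().then(() => console.log("Saved command changes"));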

Model return values from another file not working with Sequelize.js

I have the following code that does not work currently.
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection();//Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection);//Creates live references to the models.
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: true, force: false }).then(function() {
    models.users.create({
        name: 'joe',
        loggedIn: true
    }).then( task => {
        console.log("saved user!!!!!");
    });
    process.exit();//close the nodeJS Script
}).catch(function(error) {
    console.log(error);
});
sequelize-lib.js
var Sequelize = require('sequelize');
exports.getSequelizeConnection = function(stage){
    var argv = require('minimist')(process.argv.slice(2)); //If this file is being used in a script, this will attempt to get information from the stage argument passed, if it exists.
    //Change connection settings based on stage variable. Assume localhost by default.
    var dbname = argv['stage'] ? argv['stage']+"_db" : 'localdb';
    var dbuser = argv['stage'] ? process.env.RDS_USERNAME : 'admin';
    var dbpass = argv['stage'] ? process.env.RDS_PASSWORD : 'local123';
    var dbhost = argv['stage'] ? "database-"+argv['stage']+".whatever.com" : 'localhost';
    //If the stage variable is passed during require, override any arguments passed.
    if(stage){
        dbname = stage+"_db";
        dbuser = process.env.RDS_USERNAME;
        dbpass = process.env.RDS_PASSWORD;
        dbhost = "database-"+stage+".whatever.com";
    }
    var connection = new Sequelize(dbname, dbuser, dbpass, {
        dialect: 'mysql',
        operatorsAliases: false, //This gets rid of a sequelize deprecation warning, refer https://github.com/sequelize/sequelize/issues/8417
        host: dbhost
    });
    return connection;
}
exports.setModels = function(connection){
    //Import all the known models for the project.
    const fs = require('fs');
    const dir = __dirname + '/../models';
    var models = {}; //empty model object for adding model instances in the file loop below.
    //#JA - Wait until this function finishes ~ hence readdirSync vs regular readdir, which is async
    fs.readdirSync(dir).forEach(file => {
        console.log(file);
        //Split the .js part of the filename
        var arr = file.split(".");
        var name = arr[0].toLowerCase();
        //Create a model entry using the filename (without the .js) as the reference, pointing to a created sequelize instance of the file.
        models[name] = connection.import(__dirname + "/../models/" + file);
    })
    //Showcase the final model.
    console.log(models);
    return models; //This returns a map of references to the sequelize models
}
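For reference, a hypothetical invocation that exercises the argv branch (the script name is made up):
node create-user.js --stage=prod   # connects to prod_db on database-prod.whatever.com using RDS_USERNAME/RDS_PASSWORD
node create-user.js                # falls back to localhost with admin/local123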
I can't get the create command to work with this setup, however. My guess is that the variables must not be passing through correctly somehow, but I'm not sure what I'm doing wrong.
The create command definitely works, because if I modify the setModels function in sequelize-lib.js to this...
exports.setModels = function(connection){
    //Import all the known models for the project.
    const fs = require('fs');
    const dir = __dirname + '/../models';
    var models = {}; //empty model object for adding model instances in the file loop below.
    //#JA - Wait until this function finishes ~ hence readdirSync vs regular readdir, which is async
    fs.readdirSync(dir).forEach(file => {
        console.log(file);
        //Split the .js part of the filename
        var arr = file.split(".");
        var name = arr[0].toLowerCase();
        //Create a model entry using the filename (without the .js) as the reference, pointing to a created sequelize instance of the file.
        models[name] = connection.import(__dirname + "/../models/" + file);
        models[name].create({
            "name": "joe",
            "loggedIn": true
        });
    })
    //Showcase the final model.
    console.log(models);
    return models; //This returns a map of references to the sequelize models
}
Then it works and I see the item added to the database! (refer to proof image below)
Take note, I am simply running create on the variable at this point. What am I doing wrong that keeps the model object from passing between files correctly? The weird part is that I don't get any errors thrown in the main file. It's as if everything is defined but empty or something: the command is never run and nothing is added to the database.
I tried this in the main file also and no luck.
models["users"].create({
name: 'joe',
loggedIn: true
}).then( task => {
console.log("saved user!!!!!");
});
The purpose of this all is to read models automatically from the model directory and create instances that are ready to go for every model, even if new ones are added in the future.
UPDATE:
So I did another interesting test: it seems that the create function won't work in the .then() callback of the sync command. It looks like the model was being passed correctly, though. After changing the main file to this...
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection();//Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection);//Creates live references to the models using the connection previously created.
models["users"].create({
    "name": "joe",
    "loggedIn": true
});
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: true, force: false }).then(function() {
    process.exit();//close the nodeJS Script
}).catch(function(error) {
    console.log(error);
});
Doing this seems to get create to work. I'm not sure if this is good form, though, since the database might not be created at this point. I need a way to get it to work inside the sync callback.
Well I answered my question finally, but I'm not sure I like the answer.
var config = require('./libs/sequelize-lib.js');
var connection = config.getSequelizeConnection();//Choosing to not pass in variable this time since this should only run via script.
var models = config.setModels(connection);//Creates live references to the models using the connection previously created.
//Alter table as needed but do NOT force the change. If an error occurs we will fix manually.
connection.sync({ alter: false, force: false }).then( () => {
    models["users"].create({
        "name": "joe",
        "loggedIn": true
    }).then( user => {
        console.log("finished, with user.name=" + user.name);
        process.exit();
    }).catch( error => {
        console.log("Error Occured");
        console.log(error);
    });
}).catch(function(error) {
    console.log(error);
});
It turns out that process.exit was triggering before create could finish, because create happens asynchronously. This means that all my code will have to run through callbacks constantly... which seems like a bit of a nightmare. I wonder if there is a better way?
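One way to avoid nesting the callbacks, sketched here under the assumption that the Node version in use supports async/await:
async function run() {
    const config = require('./libs/sequelize-lib.js');
    const connection = config.getSequelizeConnection();
    const models = config.setModels(connection);
    try {
        await connection.sync({ alter: false, force: false });
        const user = await models["users"].create({ name: "joe", loggedIn: true });
        console.log("finished, with user.name=" + user.name);
    } catch (error) {
        console.log(error);
    } finally {
        process.exit(); // only exits after create has resolved or failed
    }
}
run();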

javascript: Creating directories from file path recursively adding timestamp to the filename

I am using the code below to split up a user-provided path, create all intermediate dirs in the path, and attach a timestamp to the ending file name. I am splitting the path on / first and then using forEach over the resulting array. Is there any better way or a more direct library function to do this in JavaScript / Node.js?
function test(usrPath) {
    var locMatches = usrPath.split("/")
    locMatches.forEach(function (item) {
        // `location` is assumed to be defined in an outer scope (e.g. a base directory).
        location = pathUtils.join(location, item)
        if (!fs.existsSync(location)) {
            fs.mkdirSync(location)
        }
    })
    return pathUtils.join(location, usrPath + (new Date).toISOString().replace(/(^\d\d\d\d)|-|:|(\..*)/g, "").replace("T", "_") + ".log")
}
OK, so Node's path utilities make the implementation more portable across platforms.
They also give better-managed access to path elements like root, dir, filename, and extension, and pathUtils.sep makes working on the directory elements more cross-platform.
var fs = require('fs')
var pathUtils = require('path')
function test(usrPath) {
    var pathElements = pathUtils.parse(usrPath)
    pathElements.dir.split(pathUtils.sep).forEach(function (item) {
        // `location` is assumed to be defined in an outer scope (e.g. a base directory).
        location = pathUtils.join(location, item)
        if (!fs.existsSync(location)) {
            fs.mkdirSync(location)
        }
    })
    return pathUtils.join(location, pathElements.name + (new Date).toISOString().replace(/(^\d\d\d\d)|-|:|(\..*)/g, "").replace("T", "_") + pathElements.ext.replace(/^$/i, ".log"))
}
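If the Node version in use is 10.12 or newer (an assumption about the environment), the recursive option of fs.mkdirSync can replace the manual loop entirely; location is again assumed to be a base directory from an outer scope:
var fs = require('fs')
var pathUtils = require('path')
function test(usrPath) {
    var pathElements = pathUtils.parse(usrPath)
    var targetDir = pathUtils.join(location, pathElements.dir)
    fs.mkdirSync(targetDir, { recursive: true }) // creates all intermediate directories in one call
    var stamp = (new Date).toISOString().replace(/(^\d\d\d\d)|-|:|(\..*)/g, "").replace("T", "_")
    return pathUtils.join(targetDir, pathElements.name + stamp + (pathElements.ext || ".log"))
}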

gulp-load-plugins.sourcemaps.init() TypeError: Cannot read property 'init' of undefined

I'm trying to adapt a gulp file to my purposes and I'm running into issues. I only care about one task:
gulp.task('js:browser', function () {
    return mergeStream.apply(null,
        Object.keys(jsBundles).map(function(key) {
            return bundle(jsBundles[key], key);
        })
    );
});
It is using browserify to condense my bundle into a usable single file. It uses these two methods and this object:
function createBundle(src) {
    //if the source is not an array, make it one
    if (!src.push) {
        src = [src];
    }
    var customOpts = {
        entries: src,
        debug: true
    };
    var opts = assign({}, watchify.args, customOpts);
    var b = watchify(browserify(opts));
    b.transform(babelify.configure({
        stage: 1
    }));
    b.transform(hbsfy);
    b.on('log', plugins.util.log);
    return b;
}
function bundle(b, outputPath) {
    var splitPath = outputPath.split('/');
    var outputFile = splitPath[splitPath.length - 1];
    var outputDir = splitPath.slice(0, -1).join('/');
    console.log(outputFile);
    console.log(plugins);
    return b.bundle()
        // log errors if they happen
        .on('error', plugins.util.log.bind(plugins.util, 'Browserify Error'))
        .pipe(source(outputFile))
        // optional, remove if you don't need to buffer file contents
        .pipe(buffer())
        // optional, remove if you don't want sourcemaps
        .pipe(plugins.sourcemaps.init({loadMaps: true})) // loads map from browserify file
        // Add transformation tasks to the pipeline here.
        .pipe(plugins.sourcemaps.write('./')) // writes .map file
        .pipe(gulp.dest('build/public/' + outputDir));
}
var jsBundles = {
    'js/polyfills/promise.js': createBundle('./public/js/polyfills/promise.js'),
    'js/polyfills/url.js': createBundle('./public/js/polyfills/url.js'),
    'js/settings.js': createBundle('./public/js/settings/index.js'),
    'js/main.js': createBundle('./public/js/main/index.js'),
    'js/remote-executor.js': createBundle('./public/js/remote-executor/index.js'),
    'js/idb-test.js': createBundle('./public/js/idb-test/index.js'),
    'sw.js': createBundle(['./public/js/sw/index.js', './public/js/sw/preroll/index.js'])
};
When I run the gulp task js:browser I get the following error, coming from the .pipe(plugins.sourcemaps.init({loadMaps: true})) expression:
TypeError: Cannot read property 'init' of undefined
I know that those lines are optional and I can just comment them out, but I do want them. When I run the code in the example file it works properly, but when I run it in my gulp file it gives me this error. Any suggestions on what I might be missing? Thanks!
gulp-load-plugins analyzes the contents of your package.json file to find out which Gulp plugins you have installed. Make sure that gulp-sourcemaps is among the "devDependencies" defined there. If not, run:
npm install --save-dev gulp-sourcemaps
There's a small chance that your problem is related to lazy loading of the sourcemaps plugin. If the above doesn't help, try requiring gulp-load-plugins like this:
var plugins = require('gulp-load-plugins')({lazy:false});
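If lazy loading still misbehaves, another workaround (a sketch, with a made-up task name and glob) is to require gulp-sourcemaps directly and bypass the loader for this step:
var gulp = require('gulp');
var sourcemaps = require('gulp-sourcemaps'); // required directly instead of via gulp-load-plugins
gulp.task('js:sourcemaps-check', function () {
    return gulp.src('build/public/js/*.js')
        .pipe(sourcemaps.init({ loadMaps: true }))
        .pipe(sourcemaps.write('./'))
        .pipe(gulp.dest('build/public/js'));
});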

Firefox Addon Reading Local XPI, File and Directory Listing

Following a chat in #amo-editors, I was wondering if the following are possible from a Firefox Addon:
Opening a local XPI for reading
Listing all files in above XPI with their sizes
Reading selected files
Absolutely possible.
1 + 2) You have to use nsIZipReader to read the XPI. This gives you all the files within it.
3) To read the contents, you have to use the zip reader's getInputStream function, wrap the result in a scriptable stream instance, and then read it with entry.realSize as the argument, since read on the stream takes the number of characters to read.
MDN :: nsIZipWriter
MDN :: nsIZipReader
Edit: I was curious, and I think I got it. Here's an example of how to make it dump the contents of a zip (list all the files within). See the console.log(entryPointer) that spits out the "zip path". It also reads the contents of the files.
var zr = Cc["#mozilla.org/libjar/zip-reader;1"].createInstance(Ci.nsIZipReader);
Cu.import('resource://gre/modules/osfile.jsm');
Cu.import('resource://gre/modules/FileUtils.jsm');
var reusableStreamInstance = Cc['#mozilla.org/scriptableinputstream;1'].createInstance(Ci.nsIScriptableInputStream);
//var pathExtFolder = OS.Path.join(OS.Constants.Path.profileDir, 'extensions');
var pathToXpiToRead = OS.Path.join(OS.Constants.Path.profileDir, 'extensions', 'PortableTester#jetpack.xpi');
var nsiFileXpi = new FileUtils.File(pathToXpiToRead);
//Services.ww.activeWindow.alert(pathToXpiToRead);
try {
zr.open(nsiFileXpi); //if file dne it throws here
var entries = zr.findEntries('*');
while (entries.hasMore()) {
var entryPointer = entries.getNext(); //just a string of "zip path" (this means path to file in zip, and it uses forward slashes remember)
var entry = zr.getEntry(entryPointer); // should return true on `entry instanceof Ci.nsIZipEntry`
console.log('entryPointer', entryPointer);
/* CONSOLE OUTPUT
* "entryPointer" "bootstrap.js" Scratchpad/1:18
*/
console.info('entry', entry);
/* CONSOLE OUTPUT
* "entry" XPCWrappedNative_NoHelper { QueryInterface: QueryInterface(), compression: Getter, size: Getter, realSize: Getter, CRC32: Getter, isDirectory: Getter, lastModifiedTime: Getter, isSynthetic: Getter, permissions: Getter, compression: 8 } Scratchpad/1:19
*/
if (!entry.isDirectory) {
var inputStream = zr.getInputStream(entryPointer);
reusableStreamInstance.init(inputStream);
var fileContents = reusableStreamInstance.read(entry.realSize);
console.log('contenst of file=', fileContents);
} else {
console.log('is directory, no stream to read');
}
}
} catch (ex) {
console.warn('exception occured = ', ex);
if (ex.name == 'NS_ERROR_FILE_NOT_FOUND') {
Services.ww.activeWindow.alert('XPI at path does not exist!\n\nPath = ' + pathToXpiToRead);
}
} finally {
zr.close();
console.log('zr closed');
//Cu.forceGC(); //im not sure shoud i do this here?
}
I'm not sure if I should do a Cu.forceGC() in the finally block; maybe @nmaier can advise us on that.
I'm also not sure if I handled reading the input stream properly; it works, but I don't know how it behaves memory-wise. This is the first time I've done .read(entry.realSize).
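For the stream-reading concern, one alternative sketch (assuming NetUtil.jsm is available in this privileged context) reads each entry through NetUtil instead of a reusable scriptable stream:
Cu.import('resource://gre/modules/NetUtil.jsm');
// Inside the while loop, for non-directory entries:
var inputStream = zr.getInputStream(entryPointer);
var fileContents = NetUtil.readInputStreamToString(inputStream, entry.realSize);
console.log('contents of file=', fileContents);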
(Screenshot of the variable viewer inspecting entry was attached here.)
