Grunt-scaffold after() function access to prompt answers - javascript

The docs for the npm package grunt-scaffold contain almost no information on its after() property/function. I have a Gruntfile which creates a new directory for a new script and copies boilerplate files into it from the designated template folder. I want the grunt scaffold:new_script command to finish by logging the location of the newly generated folder.
Gruntfile.js
module.exports = function(grunt) {
    grunt.initConfig({
        scaffold: {
            new_script: {
                options: {
                    questions: [{
                        name: 'script_name',
                        type: 'input',
                        message: 'Script name, catterpillar_case?(e.g. \'new_script\'):'
                    }],
                    template: {
                        "scripts/etl_template/": "scripts/{{script_name}}/",
                    },
                    after: function(){
                        console.log("New script generated in new folder scripts/{{script_name}}")
                    }
                }
            }
        }
    });

    grunt.loadNpmTasks('grunt-scaffold');
    grunt.registerTask('default', ['scaffold']);
};
However, the output is:
-bash-4.1$ grunt scaffold:new_script
Running "scaffold:new_script" (scaffold) task
? Script name, catterpillar_case?(e.g. 'new_script'): test_grunt
New script generated in new folder scripts/{{script_name}}
Done.
This did not do the string replacement the way it did when it created the scripts/test_grunt folder! As you can see, documentation for that after() functionality barely exists, and I'm wondering whether there is some argument I can use to access the prompt answers inside it.

The documentation doesn't give an example for the after() function, but if you give it the same result parameter used in the example for filter(), you can access the answer values via their names.
Gruntfile.js
module.exports = function(grunt) {
    grunt.initConfig({
        scaffold: {
            new_script: {
                options: {
                    questions: [{
                        name: 'script_name',
                        type: 'input',
                        message: 'Script name, catterpillar_case?(e.g. \'new_script\'):'
                    }],
                    template: {
                        "scripts/etl_template/": "scripts/{{script_name}}/",
                    },
                    after: function(result){
                        console.log("New script generated in new folder scripts/" + result.script_name)
                    }
                }
            }
        }
    });

    grunt.loadNpmTasks('grunt-scaffold');
    grunt.registerTask('default', ['scaffold']);
};
Output
-bash-4.1$ grunt scaffold:new_script
Running "scaffold:new_script" (scaffold) task
? Script name, catterpillar_case?(e.g. 'new_script'): test_grunt
New script generated in new folder scripts/test_grunt
Done.
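For reference, result carries one property per question, keyed by the question's name, so additional prompts can be read the same way. A minimal sketch (the second question mentioned here is hypothetical and not part of the Gruntfile above):
after: function(result) {
    // each answer is exposed on `result` under the question's `name`
    console.log("New script generated in new folder scripts/" + result.script_name);
    // a hypothetical second question named 'author' would be available as result.author
}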

Related

You may need an appropriate loader to handle this file type, currently no loaders are configured to process this file.

I'm trying to use the $.getJSON() function in javascript but I get this error:
"You may need an appropriate loader to handle this file type,
currently no loaders are configured to process this file."
Here's my javascript:
$(document).ready(function () {
    console.log("Hello from riskanalysis.delete.js");

    var categoryTd = $('#categoryId');
    var categoryText = categoryTd.text();
    var categoryInt = parseInt(categoryText);
    console.log(categoryInt);
    console.log(categoryText);

    // Send an AJAX request
    $.getJSON("/riskanalysis/getCategoryNameById?id=" + categoryInt)
    console.log("Hello before");
        .done(function (categoryName) {
            // On success
            console.log("Category name is: " + categoryName);
            categoryTd.text(categoryName);
        });
    console.log("Hello after");
});
Here's my webpack.config.js file:
module.exports = {
    entry: {
        shared: './src/shared.js',
        home: './src/home/home.js',
        riskanalysisSearch: './src/riskanalysis/riskanalysis.search.js',
        riskanalysisCreate: './src/riskanalysis/riskanalysis.create.js',
        riskanalysisDelete: './src/riskanalysis/riskanalysis.delete.js',
        dropdown: './src/dropdown/dropdown.js',
        actionplan: './src/actionplan/actionplan.js'
    },
    output: {
        filename: '../wwwroot/js/[name].js'
    },
    optimization: {
        splitChunks: {
            cacheGroups: {
                vendor: {
                    test: /[\\/]node_modules[\\/](jquery)[\\/]/,
                    name: 'vendor',
                    chunks: 'all'
                }
            }
        }
    }
};
Here's the error:
The weird thing is that in all my other javascript files it is working perfectly.
Does anyone have an idea what could be wrong? Thanks already!
The cause of the error is the line console.log("Hello before");. Move it above $.getJSON("/riskanalysis/getCategoryNameById?id=" + categoryInt) so that it looks like this:
console.log("Hello before");
$.getJSON("/riskanalysis/getCategoryNameById?id=" + categoryInt)
.done(function (categoryName) {
// On success
console.log("Category name is: " + categoryName);
categoryTd.text(categoryName);
});
console.log("Hello after");
The line that reveals the cause is: Module parse failed: Unexpected token (13:12).
Regarding the error "You may need an appropriate loader to handle this file type, currently no loaders are configured to process this file.": this appears because the full file extension of this file is .delete.js, which makes webpack think it isn't a normal .js file. Webpack will try to find a loader for .delete.js files; as one doesn't exist, it falls back to the .js handling. Because of the syntax error above, the JS parser then fails, resulting in this error.
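If you want to be explicit about how these entry files are handled, a rule along the following lines in webpack.config.js will match multi-dot names such as riskanalysis.delete.js as well. This is only a sketch assuming babel-loader, which is not part of the original config:
module: {
    rules: [
        {
            // matches every file whose name ends in .js, including riskanalysis.delete.js
            test: /\.js$/,
            exclude: /node_modules/,
            use: 'babel-loader' // assumption: babel-loader and a Babel config are installed
        }
    ]
},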

Refactored watch task using gulp v4 doesn't work

I'm refactoring my gulpfile now that I'm using gulp v4, and I'm having an issue with gulp watch not running my stylesCompileIncremental function. Any help or pointers would be much appreciated.
My refactoring includes:
Switching to using functions instead of gulp.task
Using series and parallel as per the docs
Exporting public tasks at the bottom of my gulpfile, i.e. exports.stylesWatch = stylesWatch;
Adding callbacks in functions to tell Gulp the function is complete
The code for the affected tasks is as follows (directory paths are stored in package.json file hence pathConfig.ui... values):
// Compile only particular Sass file that has import of changed file
function stylesCompileIncremental(cb) {
    sassCompile({
        source: getResultedFilesList(changedFilePath),
        dest: pathConfig.ui.core.sass.dest,
        alsoSearchIn: [pathConfig.ui.lib.resources]
    });
    cb();
}

// Compile all Sass files and watch for changes
function stylesWatch(cb) {
    createImportsGraph();
    var watcher = gulp.watch(pathConfig.ui.core.sass.src + '**/*.scss', gulp.parallel(devServReloadStyles));
    watcher.on('change', function(event) {
        changedFilePath = event;
    });
    cb();
}

// reload css separated into own function. No callback needed as returning event stream
function reloadCss() {
    return gulp.src(generateFilePath)
        .pipe($.connect.reload()); // css only reload
}

function devServReloadStyles(cb) {
    gulp.series(stylesCompileIncremental, reloadCss);
    cb();
}
When I run gulp stylesWatch using my refactored code I get the below output (notice the stylesCompileIncremental task is not run):
So my watch task is successfully running, but something goes wrong when devServReloadStyles runs, and stylesCompileIncremental never kicks in.
The original code before refactoring (when using gulp v3) is below:
// Compile only particular Sass file that has import of changed file
gulp.task('styles:compile:incremental', () => {
    return sassCompile({
        source: getResultedFilesList(changedFilePath),
        dest: pathConfig.ui.core.sass.dest,
        alsoSearchIn: [pathConfig.ui.lib.resources]
    });
});

// Compile all Sass files and watch for changes
gulp.task('styles:watch', () => {
    createImportsGraph();
    gulp.watch(
        pathConfig.ui.core.sass.src + '**/*.scss',
        ['devServ:reload:styles']
    ).on('change', event => changedFilePath = event.path);
});

// Reload the CSS links right after 'styles:compile:incremental' task is returned
gulp.task('devServ:reload:styles', ['styles:compile:incremental'], () => {
    return gulp.src(generateFilePath) // css only reload
        .pipe($.connect.reload());
});
The original task output when running styles:watch is this:
And this is the sassCompile variable used inside stylesCompileIncremental, which I currently haven't changed in any way.
/**
 * Configurable Sass compilation
 * @param {Object} config
 */
const sassCompile = config => {
    const sass = require('gulp-sass');
    const postcss = require('gulp-postcss');
    const autoprefixer = require('autoprefixer');

    const postProcessors = [
        autoprefixer({
            flexbox: 'no-2009'
        })
    ];

    return gulp.src(config.source)
        .pipe($.sourcemaps.init({
            loadMaps: true,
            largeFile: true
        }))
        .pipe(sass({
            includePaths: config.alsoSearchIn,
            sourceMap: false,
            outputStyle: 'compressed',
            indentType: 'tab',
            indentWidth: '1',
            linefeed: 'lf',
            precision: 10,
            errLogToConsole: true
        }))
        .on('error', function (error) {
            $.util.log('\x07');
            $.util.log(error.message);
            this.emit('end');
        })
        .pipe(postcss(postProcessors))
        .pipe($.sourcemaps.write('.'))
        .pipe(gulp.dest(config.dest));
};
UPDATE
This is due to an issue with my devServReloadStyles function, although I'm still unsure why. If I change my stylesWatch function to use the original devServ:reload:styles task, stylesCompileIncremental gets run.
// Compile all Sass files and watch for changes
function stylesWatch(cb) {
    createImportsGraph();
    var watcher = gulp.watch(pathConfig.ui.core.sass.src + '**/*.scss', gulp.parallel('devServ:reload:styles'));
    watcher.on('change', function(event) {
        changedFilePath = event;
    });
    cb();
}
It would still be good to not use the old task and have this as a function though.
Can anybody tell me why my refactored version doesn't work and have any suggestions as to how this should look?
I've fixed this now.
gulp.series and gulp.parallel return functions, so there was no need to wrap stylesCompileIncremental and reloadCss inside another function, i.e. devServReloadStyles.
As per Blaine's comment here.
So my function:
function devServReloadStyles(cb) {
    gulp.series(stylesCompileIncremental, reloadCss);
    cb();
}
Can just be assigned to a variable:
const devServReloadStyles = gulp.series(stylesCompileIncremental, reloadCss);
And my stylesWatch task is already calling devServReloadStyles:
// Compile all Sass files and watch for changes
function stylesWatch(cb) {
    createImportsGraph();
    var watcher = gulp.watch(pathConfig.ui.core.sass.src + '**/*.scss', gulp.parallel(devServReloadStyles));
    watcher.on('change', function(event) {
        changedFilePath = event;
    });
    cb();
}
So running gulp stylesWatch now runs the stylesCompileIncremental job (notice how devServReloadStyles no longer shows up by name, as it isn't a declared function anymore).
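For completeness, the relevant part of the refactored gulpfile now looks roughly like this. This is only a sketch assembled from the snippets above; the export line reflects the exports.stylesWatch = stylesWatch approach mentioned earlier:
// series() returns a composed function, so it can be assigned directly to a constant
const devServReloadStyles = gulp.series(stylesCompileIncremental, reloadCss);

// Compile all Sass files and watch for changes
function stylesWatch(cb) {
    createImportsGraph();
    var watcher = gulp.watch(pathConfig.ui.core.sass.src + '**/*.scss', gulp.parallel(devServReloadStyles));
    // in gulp v4 the 'change' handler receives the changed path directly
    watcher.on('change', function(changedPath) {
        changedFilePath = changedPath;
    });
    cb();
}

exports.stylesWatch = stylesWatch;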

custom yeoman generator test: creating files

I've got a very simple yeoman generator, watchjs, that has a speaker subgenerator. Below is how it is used:
$ yo watchjs:speaker
You called the watch.js speaker subgenerator.
? Speaker file: data/speakers/speakers.json
? Speaker name: abc
{ file: 'data/speakers/speakers.json', name: 'abc' }
Generated slug is: abc
Trying to add: {
"id": "abc",
"name": "abc"
}
Mainly, there are two prompts: file, which defines the JSON file the data should be appended to, and name, which defines the actual data to be added to the file (slightly modified). I'm trying to write a simple yeoman test for this. I've been trying to follow the docs, but I'm failing all the time:
$ npm test
> generator-watchjs@0.0.2 test c:\Users\tomasz.ducin\Documents\GitHub\generator-watchjs
> mocha
Watchjs:speaker
{ file: 'speakers.json', name: 'John Doe' } // <- this is my console.log
1) "before all" hook
0 passing (59ms)
1 failing
1) Watchjs:speaker "before all" hook:
Uncaught Error: ENOENT, no such file or directory 'C:\Users\TOMASZ~1.DUC\AppData\Local\Temp\53dac48785ddecb6dabba402eeb04f91e322f844\speakers.json'
at Object.fs.openSync (fs.js:439:18)
at Object.fs.readFileSync (fs.js:290:15)
at module.exports.yeoman.generators.Base.extend.writing (c:\Users\tomasz.ducin\Documents\GitHub\generator-watchjs\speaker\index.js:43:33)
npm ERR! Test failed. See above for more details.
I can't understand where the file is actually created and where the tests are looking for it. A temporary Windows location seems to be used, but if everything works properly relative to that path, the file should have been found, and it's not. I can't figure out what to do to make the tests pass.
The content of my test file is:
'use strict';

var path = require('path');
var assert = require('yeoman-generator').assert;
var helpers = require('yeoman-generator').test;

describe('watchjs:speaker', function () {
    before(function (done) {
        helpers.run(path.join(__dirname, '../speaker'))
            .withOptions({ 'skip-install': true })
            .withPrompts({ 'file': 'speakers.json', 'name': "John Doe" })
            .on('end', done);
    });

    it('creates files', function () {
        assert.file([
            'speakers.json'
        ]);
    });
});
I'm passing a specific name and file name via prompt.
I've found out that npm test calls package.json's mocha command (and that's it), but I'm not an expert in mocha.
I'm using node v0.10.35 on Windows7.
First, you should use absolute paths in your test, so the location of the file is predictable.
My test would look something like this:
'use strict';

var fs = require('fs');
var path = require('path');
var assert = require('yeoman-generator').assert;
var helpers = require('yeoman-generator').test;

describe('watchjs:speaker', function () {
    before(function (done) {
        var self = this;
        var name = 'John Doe';
        var testPath = path.join(__dirname, 'temp');

        // store in test object for later use
        this.filePath = path.join(testPath, 'speaker.json');

        helpers.run(path.join(__dirname, '../speaker'))
            .inDir(testPath)
            .withPrompts({ 'file': self.filePath, 'name': name })
            .withOptions({ 'skip-install': true })
            .on('end', done);
    });

    it('creates files', function () {
        assert.file(this.filePath);
        assert.fileContent(this.filePath, /\"id\":.*\"john-doe\"/);
        assert.fileContent(this.filePath, /\"name\":.*\"John Doe\"/);
    });
});
Second, and not directly related to your question, the test above will fail on the code in the repo you shared. As I mentioned in my comment, it throws an error here if the file doesn't already exist.
I would change:
var content = JSON.parse(fs.readFileSync(this.options.file, 'utf8'));
to:
try {
    var content = JSON.parse(fs.readFileSync(this.options.file, 'utf8'));
} catch (e) {
    content = [];
}
With the change above, the test will pass.
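The generator's writing step isn't shown in the question, but under the assumption that it appends the new speaker and writes the file back, the guarded read-modify-write might look roughly like this (the field names are hypothetical, not the actual repo code):
// sketch of the subgenerator's writing() method -- not the actual repo code
writing: function () {
    var content;
    try {
        content = JSON.parse(fs.readFileSync(this.options.file, 'utf8'));
    } catch (e) {
        // file doesn't exist yet (e.g. in a fresh test directory): start empty
        content = [];
    }
    // hypothetical fields derived from the prompt answers
    content.push({ id: this.slug, name: this.name });
    fs.writeFileSync(this.options.file, JSON.stringify(content, null, 2));
}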

How to load multiple JSON files into Jade templates using Grunt?

I can successfully load one JSON file as a data source for my Jade templates using Grunt, similar to this solution.
Now I need to load a set of JSON files from different folders inside my project so that all the data from them is accessible from the Jade templates. What is the best way to do this in the context of Grunt tasks?
You can load as many JSON files as you like with this method:
// Jade => HTML
gruntConfig.jade = {
    compile: {
        options: {
            data: {
                object: grunt.file.readJSON('JSON_FILE.json'),
                object1: grunt.file.readJSON('JSON_FILE_1.json'),
                object2: grunt.file.readJSON('JSON_FILE_2.json')
            }
        }
    }
};
And then in the Jade template you simply need to reference the object, e.g.:
script(src= object.baseURL + "js/vendor/jquery.js")
script(src= object.baseURL + "js/vendor/elementQuery.js")
script(data-main="js/main", src= object.baseURL + "js/vendor/require.js")
I know this is being answered a bit late, but for anyone who comes across this from a Google search as I have, this is how to load multiple JSON files into a Jade template using Grunt.js.
You need to combine the objects before you serve them to Jade. For this I would recommend using Underscore.js.
Personally I would write a method to fetch the data from the files, like this:
module.exports = function(grunt) {
    // Underscore.js provides the extend() used to merge the parsed JSON objects
    var _ = require('underscore');

    function combineJSONFiles() {
        var object = {};
        for (var i = 0; i < arguments.length; ++i) {
            _(object).extend(grunt.file.readJSON(arguments[i]));
        }
        return object;
    }

    grunt.loadNpmTasks('grunt-contrib-jade');

    grunt.initConfig({
        jade: {
            html: {
                src: './*.jade',
                dest: './index2.html',
                options: {
                    client: false,
                    pretty: true,
                    data: combineJSONFiles(
                        "1.json",
                        "2.json",
                        "3.json"
                    )
                }
            }
        }
    });

    grunt.registerTask('default', 'jade');
};
Hope that helps!
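If the JSON files live in several folders and you'd rather not list each one, you can also glob for them. A minimal sketch using grunt.file.expand (the patterns below are assumptions about the project layout):
function combineJSONFiles(patterns) {
    // expand the glob pattern(s) to concrete file paths, then merge the parsed objects
    return grunt.file.expand(patterns).reduce(function (combined, filepath) {
        return _(combined).extend(grunt.file.readJSON(filepath));
    }, {});
}

// usage inside the jade options:
// data: combineJSONFiles(['data/**/*.json', 'content/*.json'])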

Loading external file from Karma/Jasmine test

I'm trying to write a Jasmine test (using Karma and IntelliJ 13) to validate JSON files. Ideally, my test would simply load a JSON file into a data object, then let me parse through it to check for valid formatting and data. I don't need to validate functions before or after, nor do I need to test against a server.
My basic setup is like this:
it("should load an external file", function(){
var asyncCallComplete, result,
_this = this;
// asyncCallComplete is set to true when the ajax call is complete
asyncCallComplete = false;
// result stores the result of the successful ajax call
result = null;
// SECTION 1 - call asynchronous function
runs(function() {
return $.ajax('/test/config.json', {
type: 'GET',
success: function(data) {
asyncCallComplete = true;
result = data;
},
error: function() {
asyncCallComplete = true;
}
});
});
// SECTION 2 - wait for the asynchronous call to complete
waitsFor(function() {
return asyncCallComplete !== false;
}, "async to complete");
// SECTION 3 - perform tests
return runs(function() {
return expect(result).not.toBeNull();
});
}
The problem is that no matter what path I use, I get a 404 error and the file won't load. I've tried loading an external JSON result from a remote server using this test service:
http://date.jsontest.com/
And this works.
My test file is named /test/mySpec.js and my karma.conf.js file is in the root. I have moved the JSON file around to all of these locations with no luck. What am I doing wrong?
UPDATE WITH ANSWER:
Per the answer below, I added this to my karma.conf.js:
// fixtures
{
    pattern: 'test/*.json',
    watched: true,
    served: true,
    included: false
}
Then, I wrote my test this way:
var json: any;

it("should load a fixture", function () {
    jasmine.getFixtures().fixturesPath = "base/test/";
    var f = readFixtures("registration.json");
    json = JSON.parse(f);
    expect(json).toBeDefined();
});

it("should have a title", function () {
    expect(json.title).toNotBe(null);
});
etc...
And it passes.
Are you serving the JSON file via karma.conf.js?
You can serve JSON files via fixture:
files: [
    // angular
    'angular.min.js',
    'angular-route.js',
    'angular-mocks.js',

    // jasmine jquery helper
    'jquery-1.10.2.min.js',
    'jasmine-jquery.js',

    // app
    '../../public/js/app.js',

    // tests
    '*-spec.js',

    // JSON fixture
    {
        pattern: '/test/*.json',
        watched: true,
        served: true,
        included: false
    }
],
Serving JSON via a fixture is the easiest, but because of our setup we couldn't do that easily, so I wrote an alternative helper function:
Install
bower install karma-read-json
Usage
Put karma-read-json.js in your Karma files. Example:
files = [
    ...
    'bower_components/karma-read-json/karma-read-json.js',
    ...
]
Make sure your JSON is being served by Karma. Example:
files = [
    ...
    {pattern: 'json/**/*.json', included: false},
    ...
]
Use the readJSON function in your tests. Example:
var valid_respond = readJSON('json/foobar.json');
$httpBackend.whenGET(/.*/).respond(valid_respond);
If you are trying to load an HTML file and want to avoid using jasmine-jquery, you may take advantage of the karma-ng-html2js-preprocessor.
In your karma.conf.js:
// generate js files from html templates
preprocessors: {
    'resources/*.html': 'ng-html2js'
},

files: [
    ...
    'resources/*.html'
],

plugins: [
    ...
    'karma-ng-html2js-preprocessor'
],
In your Jasmine spec:
beforeEach(module('resources/fragment.html'));

var $templateCache;
beforeEach(inject(function (_$templateCache_) {
    $templateCache = _$templateCache_;
}));

describe('some test', function () {
    it('should do something', function () {
        // --> load the fragment.html content from the template cache <--
        var fragment = $templateCache.get('resources/fragment.html');
        expect(fragment).toBe(...);
    });
});
Have you tried simply requiring the json file and storing it as a global variable in your test?
I'm developing an Angular2 project right now (using the Angular CLI), and with this setup it works:
// On the very beginning of the file
let mockConfig = require('./test/config.json');
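With that in place, the parsed object can be asserted on directly. A short sketch using the mockConfig variable from above (the title key is an assumption about the config file's contents):
describe('config.json', function () {
    it('is parsed and exposes the expected data', function () {
        expect(mockConfig).toBeDefined();
        // hypothetical key -- adjust to whatever the config actually contains
        expect(mockConfig.title).not.toBeNull();
    });
});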
