I have a simple Hapi.js Node API. Since I wrote the API in TypeScript, I wrote a Gulp task for transpiling the code. My API works fine if I run it directly on my main machine, but I get the following error when I try to run it inside Docker:
Error:
Docker compose command:
docker-compose -f docker-compose.dev.yml up -d --build
Here is my code:
./gulpfile:
'use strict';
const gulp = require('gulp');
const rimraf = require('gulp-rimraf');
const tslint = require('gulp-tslint');
const mocha = require('gulp-mocha');
const shell = require('gulp-shell');
const env = require('gulp-env');
const { exec } = require('child_process');

// output directory and tsc command referenced below (assumed values;
// these were not defined elsewhere in the file)
const outDir = './build';
const tscCmd = 'npm run tsc';
/**
* Remove build directory.
*/
gulp.task('clean', function () {
return gulp.src(outDir, { read: false })
.pipe(rimraf());
});
/**
* Lint all custom TypeScript files.
*/
gulp.task('tslint', () => {
return gulp.src('src/**/*.ts')
.pipe(tslint({
formatter: 'prose'
}))
.pipe(tslint.report());
});
/**
* Compile TypeScript.
*/
function compileTS(args, cb) {
return exec(tscCmd + args, (err, stdout, stderr) => {
console.log(stdout);
if (stderr) {
console.log(stderr);
}
cb(err);
});
}
gulp.task('compile', shell.task([
'npm run tsc',
]))
/**
* Watch for changes in TypeScript
*/
gulp.task('watch', shell.task([
'npm run tsc-watch',
]))
/**
* Copy config files
*/
gulp.task('configs', (cb) => {
return gulp.src("src/configurations/*.json")
.pipe(gulp.dest('./build/src/configurations'));
});
/**
* Build the project.
*/
gulp.task('build', ['tslint', 'compile', 'configs'], () => {
console.log('Building the project ...');
});
/**
* Run tests.
*/
gulp.task('test', ['build'], (cb) => {
const envs = env.set({
NODE_ENV: 'test'
});
gulp.src(['build/test/**/*.js'])
.pipe(envs)
.pipe(mocha({ exit: true }))
.once('error', (error) => {
console.log(error);
process.exit(1);
});
});
gulp.task('default', ['build']);
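(For context, the compile and watch tasks above shell out to npm scripts that aren't shown in the question; they would be something along these lines in package.json - an assumption, not the actual file:)
"scripts": {
  "tsc": "tsc",
  "tsc-watch": "tsc --watch",
  "build": "gulp build",
  "start": "node build/src/index.js"
}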
./.docker/dev.dockerfile:
FROM node:latest
LABEL author="Saurabh Palatkar"
# create a specific user to run this container
# RUN adduser -S -D user-app
# add files to container
ADD . /app
# specify the working directory
WORKDIR app
RUN chmod -R 777 .
RUN npm i gulp --g
# build process
RUN npm install
# RUN ln -s /usr/bin/nodejs /usr/bin/node
RUN npm run build
# RUN npm prune --production
EXPOSE 8080
# run application
CMD ["npm", "start"]
./docker-compose.dev.yml:
version: "3.4"
services:
api:
image: node-api
build:
context: .
dockerfile: .docker/dev.dockerfile
environment:
PORT: 8080
MONGO_URL: mongodb:27017
NODE_ENV: development
ports:
- "8080:8080"
links:
- database
database:
image: mongo:latest
ports:
- "27017:27017"
What am I missing here?
Edit:
The problem wasn't what I initially thought. The order of the operations in the Dockerfile was simply wrong: you have to install the dependencies first, then copy all the files into the container (so the installed dependencies are also available there); only then can you run the application with its dependencies.
I made a pull request on your repo with those fixes :)
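For illustration, a minimal sketch of that corrected ordering (the base image tag and paths here are assumptions; pin the Node version you actually use):
FROM node:10
WORKDIR /app
# install dependencies first, so this layer is cached and the
# installed modules are in the image before the sources arrive
COPY package*.json ./
RUN npm install
RUN npm i -g gulp
# only then copy the application sources and build
COPY . .
RUN npm run build
EXPOSE 8080
CMD ["npm", "start"]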
Don't use node:latest; configure the Dockerfile to the exact Node version that you are using in the dev environment.
Related
I have a storybook start script which I want to run for some specific folder:
"storybook": "start-storybook -p 6006 -s ./src"
This loads all stories from src folder. As the amount of stories becomes larger, I want to run stories only from some of subfolders:
start-storybook -p 6006 -s ./src/components/CommonComponents
start-storybook -p 6006 -s ./src/components/DashboardComponents
How can I format argument value dynamically in order to start storybook like this below?
$ yarn storybook CommonComponents
And it would turn into:
start-storybook -p 6006 -s ./src/components/CommonComponents
The storybook task could be a script; inside the script you parse the arguments and call start-storybook.
Create a task in package.json (e.g. run-storybook) and set it to execute the custom script:
"run-storybook": "./path/to/script.js"
#!/usr/bin/env node
// script.js
const { spawn } = require('child_process')
const args = process.argv.slice(2)
const port = args[1]
const component = args[3]
const path = `./src/components/${component}`
spawn('yarn', ['start-storybook', '-p', port, '-s', path], {
stdio: 'inherit',
shell: true
})
Then you can call: yarn run-storybook -p 6006 -s yourcomponent
Note: make sure the script is executable: chmod +x /path/to/script.js.
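If you want the exact invocation from the question (yarn storybook CommonComponents), a minimal variant of the same idea reads the folder as a positional argument instead - the file name and the fallback path here are assumptions:
#!/usr/bin/env node
// storybook.js - hypothetical variant: take the component folder as a
// positional argument and fall back to ./src when it's absent
const { spawn } = require('child_process')

const component = process.argv[2]
const path = component ? `./src/components/${component}` : './src'

spawn('yarn', ['start-storybook', '-p', '6006', '-s', path], {
  stdio: 'inherit',
  shell: true
})
With "storybook": "node ./storybook.js" in package.json, yarn storybook CommonComponents then expands to start-storybook -p 6006 -s ./src/components/CommonComponents.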
My file is:
import * as fs from 'fs'
async function loadModels() {
console.log('here i am!')
const modelFiles = fs.readFileSync(__dirname + '/models')
console.log(modelFiles)
}
(async () => {
loadModels()
})()
And in package.json, I have:
"fixtures": "tsc fixtures/index"
So when I run yarn fixtures, I get:
yarn fixtures
yarn run v1.22.4
$ tsc fixtures/index
✨ Done in 8.78s.
Why won't my loadModels run?
tsc only compiles your TypeScript file; it doesn't execute it.
To execute your TypeScript file you can use the ts-node package,
and then in your package.json:
"fixtures": "ts-node fixtures/index"
After running npm run serve to start my Vue web app, it starts fine, but the page doesn't load and I see the following in the console.
Error: Please install pg package manually
_loadDialectModule connection-manager.js:81
ConnectionManager connection-manager.js:18
PostgresDialect index.js:14
Sequelize sequelize.js:322
<anonymous> main.js:32
js app.js:11203
__webpack_require__ app.js:770
fn app.js:130
1 app.js:11265
__webpack_require__ app.js:770
<anonymous> app.js:908
<anonymous> app.js:911
I've already tried installing pg and pg-hstore both locally and globally.
npm install --save pg pg-hstore
npm install --save -g pg pg-hstore
Below is part of my main.js file to test sequelize.
const Sequelize = require("sequelize");
const sequelize = new Sequelize(
"postgres://USER:PASSWORD#localhost:5432/DBNAME"
);
sequelize
.authenticate()
.then(() => {
// eslint-disable-next-line no-console
console.log("Connection has been established successfully.");
})
.catch(err => {
// eslint-disable-next-line no-console
console.error("Unable to connect to the database:", err);
});
If you have installed sequelize then you have to install pg globally.
The idea of the following code is to react to changes in files inside a folder. When I run this code on macOS, everything works as expected.
let fs = require("fs");
let options = {
encoding: 'buffer'
}
fs.watch('.', options, function(eventType, filename) {
if(filename)
{
console.log(filename.toString());
}
});
Inside a Docker container, on the other hand, the code does not react to file changes. I run the code in the following way:
docker run -it --rm --name $(basename $(pwd)) -v $(pwd):/app -w /app node:slim sh -c 'node index'
Is there an option to use with Docker to allow system notifications for file changes?
New answer
Initially I advised Gulp (see the bottom of the post for the old answer). It did not work because you tried to use it programmatically, while Gulp is a task runner with its own usage patterns, which I had not described. Since you need something specific, here is a very simple, surely working solution.
It uses gaze, one of the modules used by gulp - a module with approximately 1.7m downloads per week. It works, for sure, on every system.
npm install gaze --save
To make it work, let's create index.js in your root folder (which will be mounted to your app folder inside the container), and then just follow the basic instructions given in the module README:
var gaze = require('gaze');
// Watch all .js files/dirs in process.cwd()
gaze('**/*.js', function(err, watcher) {
// Files have all started watching
// watcher === this
console.log('Watching files...');
// Get all watched files
var watched = this.watched();
// On file changed
this.on('changed', function(filepath) {
console.log(filepath + ' was changed');
});
// On file added
this.on('added', function(filepath) {
console.log(filepath + ' was added');
});
// On file deleted
this.on('deleted', function(filepath) {
console.log(filepath + ' was deleted');
});
// On changed/added/deleted
this.on('all', function(event, filepath) {
console.log(filepath + ' was ' + event);
});
// Get watched files with relative paths
var files = this.relative();
});
Now let's run your command:
docker run -it --rm --name $(basename $(pwd)) -v $(pwd):/app -w /app node sh -c 'node index'
Here is the output upon changes (from Linux, but this works on macOS too):
blackstork#linux-uksg:~/WebstormProjects/SO/case1> docker run -it --rm --name $(basename $(pwd)) -v $(pwd):/app -w /app node sh -c 'node index'
Watching files...
/app/bla was changed
/app/foobar.js was changed
Old answer.
You can do it with gulp.
Gulpfile.js
const gulp = require('gulp');
gulp.task( 'watch' , ()=>{
return gulp.watch(['app/**'], ['doStuff']);
});
gulp.task( 'doStuff', cb => {
console.log('stuff');
//do stuff
cb();
});
So far such an approach has worked for me (of course you can build much more complex things, but I find using gulp convenient for different filesystem tasks).
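Worth noting as an aside: a common reason fs.watch stays silent in this setup is that inotify events don't always propagate through mounted volumes, so polling-based watching is another frequent workaround. A minimal sketch using only core Node (fs.watchFile polls file stats on an interval instead of relying on filesystem events; the file name is an assumption):
let fs = require('fs');

// poll index.js every second; the listener receives the current and
// previous fs.Stats objects for the file
fs.watchFile('./index.js', { interval: 1000 }, function (curr, prev) {
  if (curr.mtimeMs !== prev.mtimeMs) {
    console.log('index.js was changed');
  }
});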
I'm trying to run Karma using gulp for running tests, following the example from: https://github.com/karma-runner/gulp-karma
My gulp file:
var gulp = require('gulp');
var Server = require('karma').Server;
/**
* Run test once and exit
*/
gulp.task('test', function (done) {
new Server({
configFile: __dirname + '/karma.conf.js',
singleRun: true
}, done).start();
});
/**
* Watch for file changes and re-run tests on each change
*/
gulp.task('tdd', function (done) {
new Server({
configFile: __dirname + '/karma.conf.js'
}, done).start();
});
gulp.task('default', ['tdd']);
After I run gulp test, I get the error:
TypeError: Server is not a function at Gulp.<anonymous>
Any suggestions of what might be wrong?
Which version of karma do you have installed?
The API has changed from 0.12 to 0.13 and the example you've shown is the one for 0.13.
The previous API was the following:
var server = require('karma').server;
//... more code
server.start( { .... } , function(exitCode){
// ...
});
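For example, the equivalent 0.12-style gulp task would look something like this (a sketch, assuming karma 0.12 is installed):
var gulp = require('gulp');
var server = require('karma').server;

gulp.task('test', function (done) {
  server.start({
    configFile: __dirname + '/karma.conf.js',
    singleRun: true
  }, function (exitCode) {
    // report a failing exit code back to gulp
    done(exitCode ? new Error('Karma exited with code ' + exitCode) : undefined);
  });
});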
The issue was that the karma-cli npm module wasn't properly installed globally. Running npm install -g karma-cli solved the issue.