I have this piece of code in my socket.io file, and here I can use the socket directly.
import _ from 'lodash'
import mongoose from 'mongoose'

exports.register = (server, options, next) => {
    var io = require('socket.io')(server.listener)
    io.on('connection', async (socket) => {
        // here I can use socket.emit() and all
    })
    next()
}

exports.register.attributes = {
    name: 'socket'
}
Now, I need to use the io object to emit events from various files, and I don't want to wire up io.on('connection', async (socket) => {}) every time.
How can I do this?
Thank you!!!
The next callback doesn't serve a good purpose here because it's synchronous. Since the socket.io connection event can be triggered multiple times, it cannot be converted to a promise for easier chaining, so it's better for it to stay callback-based.
It can be:
var socketIo = require('socket.io')

exports.register = (server, options) => {
    var io = socketIo(server.listener);
    return onConnect => {
        io.on('connection', onConnect);
    };
}
So the connection function is created once:
const myConnection = register(myServer);
And used throughout the application:
myConnection(client => {
    ...
});
This situation can also benefit from observables, e.g. RxJS.
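For example, a minimal sketch with RxJS (assuming the rxjs package is installed) could expose connections as a stream:

var { fromEvent } = require('rxjs');
var socketIo = require('socket.io');

exports.register = (server, options) => {
    var io = socketIo(server.listener);
    // Each 'connection' event becomes a value on the stream,
    // so any module can subscribe without re-registering handlers.
    return fromEvent(io, 'connection');
}

Consumers would then do register(myServer).subscribe(socket => { ... });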
In case the socket shouldn't support reconnections, this could be simplified to:
exports.register = (server, options) => {
    var io = socketIo(server.listener);
    return new Promise(resolve => {
        io.once('connection', resolve);
    });
}
The connection promise is created once:
const myConnection = register(myServer);
And used throughout the application:
const client = await myConnection;
...
You can share functionality across your server instance with hapi's server decorations:
import _ from 'lodash'
import mongoose from 'mongoose'

exports.register = (server, options, next) => {
    var io = require('socket.io')(server.listener)
    io.on('connection', async (socket) => {
        // here I can use socket.emit() and all
    })
    // decorate the server with the io instance
    server.decorate('server', 'io', io);
    next()
}

exports.register.attributes = {
    name: 'socket'
}
Then in your controller:

handler: function (request, reply) {
    const io = request.server.io;
}
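With the decoration in place, any handler can broadcast through the shared instance. A minimal sketch (the 'news' event name is just a hypothetical example):

handler: function (request, reply) {
    // hypothetical broadcast to every connected client
    request.server.io.emit('news', { hello: 'world' });
    reply({ ok: true });
}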
I am testing a Node.js Express API with a PostgreSQL DB wrapped with the Prisma ORM.
I configured a testing singleton instance as described in the Prisma docs.
Since the API is implemented in CommonJS and not TS, I had to make some changes as described in this beautiful page.
Here is a synthesis of what I did; I'll try to keep it short so it's easier to read.
orgs.js (A GET route served by the mock server later on ...)
const Router = require('express-promise-router')
const router = new Router()
const PrismaPool = require('../db/PrismaPool');
module.exports = router

router.get('/assessments', async (req, res) => {
    try {
        const prisma = PrismaPool.getInstance();
        const data = await prisma.org.findUnique({
            select: {
                assessments: true,
            },
            where: {
                id: res.locals.orgId,
            },
        })
        res.send(data)
    }
    catch (err) {
        handleError(err, "[GET]/orgs/assessments", 400, req, res)
    }
})
PrismaPool.js (A wrapper to access the unique prisma client instance)
const prisma = require('./PrismaClientInstance').default

class PrismaPool {
    constructor() {
        throw new Error('Use PrismaPool.getInstance()');
    }

    static getInstance() {
        return prisma
    }
}

module.exports = PrismaPool;
PrismaClientInstance.js (the unique instance of the PrismaClient class). This is the tricky part, stitching between the CommonJS world and the TS world.
'use strict';
exports.__esModule = true;

const { PrismaClient } = require('@prisma/client')
const prisma = new PrismaClient()

exports['default'] = prisma;
All this configuration works great at runtime. However, when wrapping it with Jest in unit tests, things go south quickly...
mock_server.js (a simplified server to expose the orgs API above)
const http = require('http');
const express = require('express');
var orgsRouter = require('../orgs');

const app = express();
app.use('/orgs', orgsRouter);

const port = 3011
app.set('port', port);

const server = http.createServer(app);

function onError(error) {
    // error handling
}

function onListening() {
    // some debug messages
}

server.listen(port);
server.on('error', onError);
server.on('listening', onListening);

module.exports = server
PrismaSingletonForTesting.ts (a Jest deep mock of the PrismaClient instance)
import { PrismaClient } from '@prisma/client'
import { mockDeep, mockReset, DeepMockProxy } from 'jest-mock-extended'
import prisma from './PrismaClientInstance'

jest.mock('./PrismaClientInstance', () => ({
    __esModule: true,
    default: mockDeep<PrismaClient>()
}))

beforeEach(() => {
    mockReset(prismaMock)
})

export const prismaMock = prisma as unknown as DeepMockProxy<PrismaClient>
orgs.test.js (The tests of the orgs API)
const Request = require("request")
const { prismaMock } = require('../../db/PrismaSingletonForTesting')

const TEST_ORGID = 1

describe('egrest server', () => {
    let server

    beforeAll(() => {
        server = require('./test_server')
    })

    afterAll(() => {
        server.close()
    })

    describe('assessments', () => {
        let data = {}

        beforeAll(() => {
            const testorg = {
                id: TEST_ORGID,
                name: 'jestest',
                admin: 33,
                avail_tests: 1234
            }
            prismaMock.org.findUnique.mockResolvedValue(testorg)
        })

        it(`read remaining assessments for org ${TEST_ORGID}`, (done) => {
            Request.get("http://localhost:3011/orgs/assessments", (error, response, body) => {
                data.status = response.statusCode
                data.body = body
                data.error = error
                console.dir(body)
                done()
            })
        })
    })
})
I also configured jest.config.js with the required line setupFilesAfterEnv: ['./db/PrismaSingletonForTesting.ts'].
When I run this test, I get data = undefined in orgs.js, even though I mocked prisma.org.findUnique with prismaMock.org.findUnique.mockResolvedValue(testorg) as described in the Prisma docs.
Any help would be appreciated.
I have a problem:
I use a room join system [which I programmed only in NodeJS] and socket.io.
https://example.com/room/:roomid/
e.g. https://example.com/room/764363553/
my route:
router.get('/room/:roomid/', function (req, res, next) {
    var id = req.params.roomid;
    //....
});
I want to put sockets in rooms, but how do I do that with namespaces?
io.of("/room/:roomid/").on('connection', function (socket) {
io.of("/room/:roomid/").emit('testsocket');
}
-> does not work
client code:
var socketIO = io(window.location.pathname);

socketIO.on('testsocket', function (data) {
    console.log("socket successfully connected!");
});
Socket.IO has supported dynamic namespaces since 2.1.0.
The PR is here.
The documentation is here:
A regex or a function can also be provided, in order to create namespace in a dynamic way:
const dynamicNsp = io.of(/^\/dynamic-\d+$/).on('connect', (socket) => {
    const newNamespace = socket.nsp; // newNamespace.name === '/dynamic-101'
    // broadcast to all clients in the given sub-namespace
    newNamespace.emit('hello');
});

// client-side
const socket = io('/dynamic-101');

// broadcast to all clients in each sub-namespace
dynamicNsp.emit('hello');

// use a middleware for each sub-namespace
dynamicNsp.use((socket, next) => { /* ... */ });
With a function:
io.of((name, query, next) => {
    next(null, checkToken(query.token));
}).on('connect', (socket) => { /* ... */ });
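Applied to the /room/:roomid/ URLs from the question, a minimal sketch (assuming socket.io >= 2.1.0) could look like this; the existing client-side io(window.location.pathname) call would then connect to the matching namespace:

// match namespaces like '/room/764363553/' with a regex
io.of(/^\/room\/\d+\/?$/).on('connect', (socket) => {
    const roomNsp = socket.nsp; // e.g. roomNsp.name === '/room/764363553/'
    roomNsp.emit('testsocket');
});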
Server:
var manager = io.of("/room").on('connection', function (socket) {
    socket.on("join", function (roomid) {
        socket.join(roomid);
        manager.to(roomid).emit('testsocket', roomid);
    });
});
Client:
var socketIO = io("/room");
var roomID = window.location.pathname.splitOnLast("/")[1]; //Should ideally be got from req.params.roomid
socketIO.emit("join", roomID)
socketIO.on('testsocket', function (data) {
console.log("Connected to room", data);
});
I'll preface this by saying I am new to nodejs in general. Coming from the world of C#, it's a completely different way of thinking for me.
I've gone through a few courses and I'm setting up a little website as sort of a test for myself. And I'm failing!
I'm using socket.io with node, and I'm trying to broadcast a message with the emitter once in a while. I don't care about specific socket points right now (although I will in the future), so the emitter for this should go out to everyone.
I am having trouble accessing the io object from other modules.
I'll post my server.js file, as well as app/index.js, socket/index.js, helpers/index.js and api/index.js. I hope that posting these will show how it's supposed to work.
Ideally, I'd like to keep all socket-related items in the socket module, for consistency. Right now, I'm trying to get a method to run in the helpers module, but ideally the socket module would be better.
Anyway, server.js:
'use strict';

const express = require('express');
const app = express();
const cryptometers = require('./app');
const api = require('./app/api');
const fs = require('fs');
const sources = require('./app/api/sources.json');

app.set('port', process.env.PORT || 3000);
app.set('view engine', 'ejs');
app.use(express.static('public'));
app.use(cryptometers.session);
app.use('/', cryptometers.router);

cryptometers.ioServer(app).listen(app.get('port'), () => {
    console.log('app listening on port ' + app.get('port'));
    api.getData(sources[0].source, sources[0].url, app);
    setInterval(function () { api.getData(sources[0].source, sources[0].url, app) }, 60000);
});
Standard fare here. I just added a data retriever that calls an API once every minute and updates the database.
app/index.js:
'use strict';

const config = require('./config');

// create an IO server instance
let ioServer = app => {
    app.locals.globalMarketCap = [];
    const server = require('http').Server(app);
    const io = require('socket.io')(server);
    io.set('transports', ['websocket']);
    io.use((socket, next) => {
        require('./session')(socket.request, {}, next);
    });
    require('./socket')(io, app);
    return server;
}

// turns the router into a module
module.exports = {
    router: require('./routes')(),
    session: require('./session'),
    ioServer,
}
Here I'm initializing socket.io, binding it to the app. It's also where I initialize a local storage array of data. (Is this a good spot to do this??)
socket/index.js:
'use strict';

const h = require('../helpers');

module.exports = (io, app) => {
    io.of('/').on('connection', socket => {
        console.log('socket.io connected to client');
        if (app.locals.globalMarketCap) {
            socket.emit('globalMarketCap', JSON.stringify(app.locals.globalMarketCap));
        }
    })
}
Here I'm responding to connection events, and pushing out the array of data that I defined in the last file above. Again, ideally I'd like all socket type stuff to stay in here.
helpers/index.js:
'use strict';

const router = require('express').Router();
const db = require('../db');

// iterate through the routes object and mount the routes
let _registerRoutes = (routes, method) => {
    for (let key in routes) {
        if (typeof routes[key] === 'object' && routes[key] !== null && !(routes[key] instanceof Array)) {
            _registerRoutes(routes[key], key);
        } else {
            // Register the routes
            if (method === 'get') {
                router.get(key, routes[key]);
            } else if (method === 'post') {
                router.post(key, routes[key]);
            } else {
                router.use(routes[key]);
            }
        }
    }
}

let route = routes => {
    _registerRoutes(routes);
    return router;
}

let updateGlobalMarketCap = (app) => {
    //app.io.socket.emit('globalMarketCap', JSON.stringify(app.locals.globalMarketCap))
}

module.exports = {
    route,
    updateGlobalMarketCap
}
The commented-out line in updateGlobalMarketCap is where my pain is: I'm trying to get access to the io object there.
api/index.js
'use strict';

const axios = require("axios");
const db = require("../db");
const h = require("../helpers");

let getData = (source, url, app, cryptoMeters) => {
    axios
        .get(url)
        .then(response => {
            //console.log(response.data);
            response.data["source"] = source;
            var data = new db.globalMarketCapModel(response.data);
            app.locals.globalMarketCap = response.data;
            var query = { source: source };
            db.globalMarketCapModel.findOne({
                "source": source
            }, 'source old_total_market_cap_usd total_market_cap_usd', function (err, market) {
                if (market) {
                    if (market.old_total_market_cap_usd != response.data.total_market_cap_usd
                        && market.total_market_cap_usd != response.data.total_market_cap_usd) {
                        response.data["old_total_market_cap_usd"] = market.total_market_cap_usd;
                        h.updateGlobalMarketCap(app);
                    }
                    db.globalMarketCapModel.update(query, response.data, function (err) {
                        if (err) {
                            console.log("uhoh")
                        } else {
                            return true;
                        }
                    });
                } else {
                    data.save(function (err) {
                        if (err) {
                            console.log("uhoh")
                        } else {
                            return true;
                        }
                    })
                }
            })
            return true;
        })
        .catch(error => {
            console.log(error);
            return false;
        });
}

module.exports = {
    getData
}
The getData function here is where a call to the update emitter would take place.
I've considered using standard node event emitters as a solution to my problem, but that might be gumming up the works and there's a simpler answer.
Anyway, thanks for reading, and I'm interested in any commentary on what I've written so far: pitfalls, mistakes, etc. Learning here! :)
There are many different ways to organize your code to accomplish sharing of the io object. Here's one such scheme. You break out your socket.io initialization code into its own module. You give that module two main features:
A constructor function (that you pass the server to) that allows socket.io to initialize itself on your server.
A method to get the io instance after it's been initialized.
This will allow any other code in your project that wants to get access to the io object to do something like this:
const io = require('./io.js').getIO();
Here's how that io module could be structured:
// io.js

// singleton instance of socket.io that is stored here after the
// constructor function is called
let ioInstance;

module.exports = function (server) {
    const io = require('socket.io')(server);
    io.set('transports', ['websocket']);
    io.use((socket, next) => {
        require('./session')(socket.request, {}, next);
    });
    // save in higher scope so it can be obtained later
    ioInstance = io;
    return io;
}

// this getIO method is designed for subsequent
// sharing of the io instance with other modules once the module has been initialized
// other modules can do: let io = require("./io.js").getIO();
module.exports.getIO = function () {
    if (!ioInstance) {
        throw new Error("Must call module constructor function before you can get the IO instance");
    }
    return ioInstance;
}
And, this module would be initialized like this:
const io = require('./io.js')(server);
Where you pass it your web server so it can hook into it. It has to be initialized like this before anyone can use .getIO() on it. Storing the ioInstance inside the module takes advantage of Node's module caching: the module initialization code runs only once, and after that the same exports are returned each time, with access to the saved ioInstance inside the module.
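For instance, the commented-out helper in the question's helpers/index.js could then become (a sketch reusing the question's event name and app.locals data):

// helpers/index.js
let updateGlobalMarketCap = (app) => {
    const io = require('./io.js').getIO();
    io.emit('globalMarketCap', JSON.stringify(app.locals.globalMarketCap));
}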
I'm using the node-mongodb-native driver with MongoDB to write a website.
I have some questions about how to manage connections:
Is it enough using only one MongoDB connection for all requests? Are there any performance issues? If not, can I setup a global connection to use in the whole application?
If not, is it good if I open a new connection when request arrives, and close it when handled the request? Is it expensive to open and close a connection?
Should I use a global connection pool? I hear the driver has a native connection pool. Is it a good choice?
If I use a connection pool, how many connections should be used?
Are there other things I should notice?
The primary committer to node-mongodb-native says:
You open MongoClient.connect once when your app boots up and reuse the db object. It's not a singleton connection pool; each .connect creates a new connection pool.
So, to answer your question directly, reuse the db object that results from MongoClient.connect(). This gives you pooling, and will provide a noticeable speed increase as compared with opening/closing connections on each db action.
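A minimal sketch of that pattern with the callback-style driver of that era (the URL and database name are placeholders):

// connect once at boot, then reuse `db` everywhere
var MongoClient = require('mongodb').MongoClient;

var db;
MongoClient.connect('mongodb://localhost:27017/myapp', function (err, database) {
    if (err) throw err;
    db = database; // reuse this object; the driver pools connections internally
});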
Open a new connection when the Node.js application starts, and reuse the existing db connection object:
/server.js
import express from 'express';
import Promise from 'bluebird';
import logger from 'winston';
import { MongoClient } from 'mongodb';
import config from './config';
import usersRestApi from './api/users';
const app = express();

app.use('/api/users', usersRestApi);

app.get('/', (req, res) => {
    res.send('Hello World');
});

// Create a MongoDB connection pool and start the application
// after the database connection is ready
MongoClient.connect(config.database.url, { promiseLibrary: Promise }, (err, db) => {
    if (err) {
        logger.warn(`Failed to connect to the database. ${err.stack}`);
    }
    app.locals.db = db;
    app.listen(config.port, () => {
        logger.info(`Node.js app is listening at http://localhost:${config.port}`);
    });
});
/api/users.js
import { Router } from 'express';
import { ObjectID } from 'mongodb';
const router = new Router();

router.get('/:id', async (req, res, next) => {
    try {
        const db = req.app.locals.db;
        const id = new ObjectID(req.params.id);
        const user = await db.collection('user').findOne({ _id: id }, {
            email: 1,
            firstName: 1,
            lastName: 1
        });
        if (user) {
            user.id = req.params.id;
            res.send(user);
        } else {
            res.sendStatus(404);
        }
    } catch (err) {
        next(err);
    }
});
export default router;
Source: How to Open Database Connections in a Node.js/Express App
Here is some code that will manage your MongoDB connections.
var MongoClient = require('mongodb').MongoClient;
var url = require("../config.json")["MongoDBURL"]

var option = {
    db: {
        numberOfRetries: 5
    },
    server: {
        auto_reconnect: true,
        poolSize: 40,
        socketOptions: {
            connectTimeoutMS: 500
        }
    },
    replSet: {},
    mongos: {}
};

function MongoPool() {}

var p_db;

function initPool(cb) {
    MongoClient.connect(url, option, function (err, db) {
        if (err) throw err;

        p_db = db;
        if (cb && typeof (cb) == 'function')
            cb(p_db);
    });
    return MongoPool;
}

MongoPool.initPool = initPool;

function getInstance(cb) {
    if (!p_db) {
        initPool(cb)
    } else {
        if (cb && typeof (cb) == 'function')
            cb(p_db);
    }
}

MongoPool.getInstance = getInstance;

module.exports = MongoPool;
When you start the server, call initPool
require("mongo-pool").initPool();
Then in any other module you can do the following:
var MongoPool = require("mongo-pool");

MongoPool.getInstance(function (db) {
    // Query your MongoDB database.
});
This is based on MongoDB documentation. Take a look at it.
Manage mongo connection pools in a single self-contained module. This approach provides two benefits. Firstly, it keeps your code modular and easier to test. Secondly, you're not forced to mix your database connection into your request object, which is NOT the place for a database connection object. (Given the nature of JavaScript, I would consider it highly dangerous to mix anything into an object constructed by library code.) So with that, you only need a module that exports two methods: connect = () => Promise and get = () => dbConnectionObject.
With such a module you can first connect to the database:
// runs in boot.js or whatever file your application starts with
const db = require('./myAwesomeDbModule');

db.connect()
    .then(() => console.log('database connected'))
    .then(() => bootMyApplication())
    .catch((e) => {
        console.error(e);
        // Always hard exit on a database connection error
        process.exit(1);
    });
While the app is running, it can simply call get() whenever it needs a DB connection.
const db = require('./myAwesomeDbModule');
db.get().find(...)... // I have excluded code here to keep the example simple
If you set up your db module like the following, you not only ensure that your application will not boot unless a database connection is established, you also get a global way of accessing the connection pool that throws an error if you don't have a connection yet.
// myAwesomeDbModule.js
const { MongoClient } = require('mongodb');

let connection = null;

module.exports.connect = () => new Promise((resolve, reject) => {
    // url and option are your connection string and driver options
    MongoClient.connect(url, option, function (err, db) {
        if (err) { reject(err); return; }
        connection = db;
        resolve(db);
    });
});

module.exports.get = () => {
    if (!connection) {
        throw new Error('Call connect first!');
    }
    return connection;
}
If you have Express.js, you can use express-mongo-db for caching and sharing the MongoDB connection between requests without a pool (since the accepted answer says it is the right way to share the connection).
If not - you can look at its source code and use it in another framework.
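For reference, usage is roughly the following (based on the package README; double-check against the current docs):

const express = require('express');
const expressMongoDb = require('express-mongo-db');

const app = express();
app.use(expressMongoDb('mongodb://localhost/test'));

app.get('/', (req, res) => {
    // req.db is the cached database connection provided by the middleware
    req.db.collection('users').find().toArray((err, users) => res.send(users));
});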
You should create the connection as a service, then reuse it whenever needed.
// db.service.js
import { MongoClient } from "mongodb";
import database from "../config/database";

const dbService = {
    db: undefined,
    connect: callback => {
        MongoClient.connect(database.uri, function (err, data) {
            if (err) {
                callback(err);
                return;
            }
            dbService.db = data;
            console.log("Connected to database");
            callback(null);
        });
    }
};

export default dbService;
My app.js sample:
// App Start
dbService.connect(err => {
    if (err) {
        console.log("Error: ", err);
        process.exit(1);
    }
    server.listen(config.port, () => {
        console.log(`Api running at ${config.port}`);
    });
});
and use it wherever you want with:

import dbService from "./db.service.js"

const db = dbService.db
I have been using generic-pool with Redis connections in my app - I highly recommend it. It's generic, and I definitely know it works with MySQL, so I don't think you'll have any problems with it and Mongo.
https://github.com/coopernurse/node-pool
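For illustration, a rough sketch with generic-pool v3 and the MongoDB driver (the factory shape follows the generic-pool README; the pool sizes and URL are arbitrary):

const genericPool = require('generic-pool');
const MongoClient = require('mongodb').MongoClient;

// the factory tells the pool how to create and destroy resources
const pool = genericPool.createPool({
    create: () => MongoClient.connect('mongodb://localhost:27017'),
    destroy: (client) => client.close()
}, { max: 10, min: 2 });

// acquire a client, use it, and release it back to the pool when done
pool.acquire().then((client) => {
    return client.db('test').collection('users').find().toArray()
        .then((users) => console.log(users))
        .then(() => pool.release(client));
});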
I have implemented the code below in my project to add connection pooling, so it will create a minimum number of connections and reuse the available ones.
/* Mongo.js */
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/yourdatabasename";
var assert = require('assert');

var connection = null;

// Create the database connection
establishConnection = function (callback) {
    MongoClient.connect(url, { poolSize: 10 }, function (err, db) {
        assert.equal(null, err);

        connection = db
        if (typeof callback === 'function')
            callback(connection)
    })
}

function getconnection() {
    return connection
}

module.exports = {
    establishConnection: establishConnection,
    getconnection: getconnection
}
/* app.js */
// establish one connection that all other routes will use.
var db = require('./routes/mongo')

db.establishConnection();

// you can also pass a callback if you want to create any collection at startup
/*
db.establishConnection(function (conn) {
    conn.createCollection("collectionName", function (err, res) {
        if (err) throw err;
        console.log("Collection created!");
    });
});
*/
// any other route.js
var db = require('./mongo')

router.get('/', function (req, res, next) {
    var connection = db.getconnection()
    res.send("Hello");
});
If you're using Express, there is another, more straightforward method: utilise Express's built-in feature for sharing data between routes and modules within your app, an object called app.locals. We can attach properties to it and access it from inside our routes. To use it, instantiate your mongo connection in your app.js file.
var express = require('express');
var path = require('path');
var MongoClient = require('mongodb').MongoClient;

var app = express();

MongoClient.connect('mongodb://localhost:27017/')
    .then(client => {
        const db = client.db('your-db');
        const collection = db.collection('your-collection');
        app.locals.collection = collection;
    });

// view engine setup
app.set('views', path.join(__dirname, 'views'));
This database connection, or indeed any other data you wish to share around the modules of your app, can now be accessed within your routes with req.app.locals as below, without the need for creating and requiring additional modules.
app.get('/', (req, res) => {
    const collection = req.app.locals.collection;
    collection.find({}).toArray()
        .then(response => res.status(200).json(response))
        .catch(error => console.error(error));
});
This method ensures that you have a database connection open for the duration of your app unless you choose to close it at any time. It's easily accessible with req.app.locals.your-collection and doesn't require creation of any additional modules.
The best approach to implement connection pooling is to create one global array variable that holds the db name along with the connection object returned by MongoClient, and then reuse that connection whenever you need to contact the database.
In your Server.js define global.dbconnections = [];
Create a service named connectionService.js. It will have 2 methods: getConnection and createConnection.
When a caller invokes getConnection(), the service looks the db up in the global connection array and returns the connection details if they already exist; otherwise it calls createConnection() and returns the newly created connection details.
Call this service with a <db_name> and it will return the connection object if it already has one, else it will create a new connection and return it to you.
Hope it helps :)
Here is the connectionService.js code:
var mongo = require('mongoskin');
var mongodb = require('mongodb');
var Q = require('q');

var service = {};
service.getConnection = getConnection;
module.exports = service;

function getConnection(appDB) {
    var deferred = Q.defer();
    var connectionDetails = global.dbconnections.find(item => item.appDB == appDB)

    if (connectionDetails) {
        deferred.resolve(connectionDetails.connection);
    } else {
        createConnection(appDB).then(function (connection) {
            deferred.resolve(connection);
        })
    }

    return deferred.promise;
}

function createConnection(appDB) {
    var deferred = Q.defer();
    // connectionServer holds the base connection string (defined elsewhere)
    mongodb.MongoClient.connect(connectionServer + appDB, (err, database) => {
        if (err) {
            deferred.reject(err.name + ': ' + err.message);
            return;
        }
        global.dbconnections.push({ appDB: appDB, connection: database });
        deferred.resolve(database);
    })

    return deferred.promise;
}
In case anyone wants something that works in 2021 with TypeScript, here's what I'm using:
import { MongoClient, Collection } from "mongodb";

const FILE_DB_HOST = process.env.FILE_DB_HOST as string;
const FILE_DB_DATABASE = process.env.FILE_DB_DATABASE as string;
const FILES_COLLECTION = process.env.FILES_COLLECTION as string;

if (!FILE_DB_HOST || !FILE_DB_DATABASE || !FILES_COLLECTION) {
    throw new Error("Missing FILE_DB_HOST, FILE_DB_DATABASE, or FILES_COLLECTION environment variables.");
}

const client = new MongoClient(FILE_DB_HOST, {
    useNewUrlParser: true,
    useUnifiedTopology: true,
});

class Mongoose {
    static FilesCollection: Collection;

    static async init() {
        const connection = await client.connect();
        const FileDB = connection.db(FILE_DB_DATABASE);
        Mongoose.FilesCollection = FileDB.collection(FILES_COLLECTION);
    }
}

Mongoose.init();

export default Mongoose;
I believe if a request occurs too soon (before Mongoose.init() has time to finish), an error will be thrown, since Mongoose.FilesCollection will still be undefined.
import { Request, Response, NextFunction } from "express";
import Mongoose from "../../mongoose";

export default async function GetFile(req: Request, res: Response, next: NextFunction) {
    const files = Mongoose.FilesCollection;
    const file = await files.findOne({ fileName: "hello" });
    res.send(file);
}
For example, if you call files.findOne({ ... }) and Mongoose.FilesCollection is undefined, then you will get an error.
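One way to avoid that race (my own sketch, not part of the original setup) is to keep the promise returned by init() and await it before touching the collection:

// in the same module: keep the init promise around instead of discarding it
const ready = Mongoose.init();

export async function filesCollection() {
    await ready; // resolves once the client has connected
    return Mongoose.FilesCollection;
}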
npm i express mongoose
mongodb.js
const express = require('express');
const mongoose = require('mongoose');
const app = express();

mongoose.set('strictQuery', true);
mongoose.connect('mongodb://localhost:27017/db_name', {
    useNewUrlParser: true,
    useUnifiedTopology: true
})
    .then(() => console.log('MongoDB Connected...'))
    .catch((err) => console.log(err))

app.listen(3000, () => { console.log("Started on port 3000 !!!") })
node mongodb.js
Using the method below you can easily manage as many connections as you need:
var mongoose = require('mongoose');

// Set up default mongoose connection
// (fill in the <username>, <password> and <database> placeholders;
// `options` is your usual mongoose connection options object)
const bankDB = mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);

bankDB.then(() => console.log('Connected to mongoDB-Atlas bankApp...'))
    .catch((err) => console.error('Could not connect to mongoDB', err));

// Set up second mongoose connection
const myDB = mongoose.createConnection('mongodb+srv://<username>:<password>@mydemo.jk4nr.mongodb.net/<database>?retryWrites=true&w=majority', options);

myDB.then(() => console.log('Connected to mongoDB-Atlas connection 2...'))
    .catch((err) => console.error('Could not connect to mongoDB', err));

module.exports = { bankDB, myDB };
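Models are then registered on a specific connection with connection.model() rather than mongoose.model(). A sketch (the model, schema, and path names here are hypothetical):

const mongoose = require('mongoose');
const { bankDB } = require('./db'); // hypothetical path to the module above

// this model is bound to the bankDB connection specifically
const Account = bankDB.model('Account', new mongoose.Schema({
    owner: String,
    balance: Number
}));

Account.find().then(accounts => console.log(accounts));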
Using Koa and koa-router, I wrote a function to accept a POST on /videos and then save the video into HDFS (a distributed file system). After the saving process finishes, a response should be sent to the client saying that the upload is done. However, the response is returned before the I/O finishes. I am new to JavaScript, please help.
app.js:
/**
 * Module dependencies.
 */
const logger = require('koa-logger');
const serve = require('koa-static');
const koaBody = require('koa-body');
const Koa = require('koa');
const app = new Koa();
const os = require('os');
const path = require('path');
var Router = require('koa-router');
var router = new Router();

// log requests
app.use(logger());
app.use(koaBody({ multipart: true }));
app.use(serve(path.join(__dirname, '/public')));

app
    .use(router.routes())
    .use(router.allowedMethods());

// listen
app.listen(3000);
console.log('listening on port 3000');

module.exports = {
    app: app,
    router: router
}

require('./services/service_upload.js')
service_upload.js
const router = require('../app.js').router
const upload = require('../business/business_uploadVideo.js')

function service_upload() {
    router.post('/videos', function (ctx, next) {
        upload.upload(ctx, function () {
        })
        ctx.body = 'upload finish'
        console.log("redirect/")
    });
}

service_upload()
If you are using recent versions of Koa and koa-router, then you can use async/await with Promises this way:
router.post('/videos', async function (ctx, next) { // Declare function as async
    await new Promise((resolve, reject) => { // Create new Promise, await will wait until it resolves
        upload.upload(ctx, function (error) {
            if (error) { // Probably you should do error handling
                reject(error);
                return;
            }
            resolve();
        })
    });
    ctx.body = 'upload finish'
    console.log("redirect/")
});
More on Promises: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
More on async/await: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await
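If upload.upload follows the Node error-first callback convention, util.promisify can shorten this further. A sketch under that assumption:

const { promisify } = require('util');
const uploadAsync = promisify(upload.upload); // assumes upload.upload(ctx, cb) calls cb(err)

router.post('/videos', async function (ctx, next) {
    await uploadAsync(ctx);
    ctx.body = 'upload finish'
    console.log("redirect/")
});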