// Issue: not able to fetch data from the table.
// My code:
const sql = require('mssql/msnodesqlv8');

const poolPromise = new sql.ConnectionPool({
    driver: 'msnodesqlv8',
    server: "test.database.windows.net",
    user: "username",
    password: "password",
    database: 'database',
    port: 1433,
})
    .connect()
    .then((pool: any) => {
        console.log('Connected to MSSQL');
        return pool;
    })
    .catch((err: any) => console.log('Database Connection Failed! Bad Config: ', err));

describe('any test', () => {
    it('verification', async () => {
        try {
            const pool = await poolPromise;
            const result = await pool.request()
                .query('select * from table where username = "Protractor"');
            console.log(result);
        } catch (err) {
            console.log(err);
        }
    });
});
Error Occurred:
[13:20:25] I/launcher - Running 1 instances of WebDriver
[13:20:25] I/hosted - Using the selenium server at http://localhost:4444/wd/hub
Started
Connected to MSSQL
{ RequestError: Invalid column name 'Protractor'.
at handleError (C:\Users\automation\node_modules\mssql\lib\msnodesqlv8\request.js:258:21)
at StreamEvents.emit (events.js:189:13)
at errors.forEach.err (C:\Users\automation\node_modules\msnodesqlv8\lib\reader.js:37:20)
at Array.forEach (<anonymous>)
at routeStatementError (C:\Users\automation\node_modules\msnodesqlv8\lib\reader.js:30:14)
at invokeObject.end (C:\Users\automation\node_modules\msnodesqlv8\lib\reader.js:209:11)
at freeStatement (C:\Users\automation\node_modules\msnodesqlv8\lib\driver.js:183:13)
at cppDriver.freeStatement (C:\Users\automation\node_modules\msnodesqlv8\lib\driver.js:163:13) originalError:
{ Error: [Microsoft][SQL Server Native Client 11.0][SQL Server]Invalid column name 'Protractor'. sqlstate: '42S22', code: 207 },
name: 'RequestError',
number: 207,
state: '42S22' }
Double and single quotes are not interchangeable in T-SQL.
Try this instead:
const result = await pool.request()
    .query(`select * from table where username = 'Protractor'`);
or this:
const result = await pool.request()
    .query('select * from table where username = \'Protractor\'');
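Another option that avoids the quoting issue entirely is to bind the value as a parameter rather than embedding it in the SQL string. A minimal sketch using mssql's request.input() (the parameter name @username is just an example):
const result = await pool.request()
    .input('username', sql.VarChar, 'Protractor') // bound as a parameter, no manual quoting
    .query('select * from table where username = @username');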
Hello, I'm trying to implement some basic Google API features and I'm running into issues when I try to get the unread messages from the Gmail mailbox. Here is my code:
require('dotenv').config();
const nodemailer = require('nodemailer');
const { google } = require('googleapis');
const Imap = require('imap');

const CLIENT_ID = process.env.GOOGLE_CLIENT_ID;
const CLIENT_SECRET = process.env.GOOGLE_CLIENT_SECRET;
const REDIRECT_URI = process.env.GOOGLE_REDIRECT_URL;
const REFRESH_TOKEN = process.env.GOOGLE_REFRESH_TOKEN;

const oauth2Client = new google.auth.OAuth2(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI);
oauth2Client.setCredentials({ refresh_token: REFRESH_TOKEN });

function newMessages(/*onNewEmailCallback*/) {
    const imap = new Imap({
        user: "mymail#gmail.com",
        xoauth2: oauth2Client.getAccessToken(),
        host: 'imap.gmail.com',
        port: 993,
        tls: true,
        tlsOptions: {
            rejectUnauthorized: false
        }
    });

    function showError(error) {
        console.error('Error:', error);
    }

    function showMessage(msg) {
        onNewEmailCallback(msg.subject);
        console.log(msg.subject);
    }

    imap.connect();

    imap.once('ready', () => {
        imap.openBox('INBOX', true, (err, box) => {
            if (err) showError(err);
            imap.on('mail', (numNewMsgs) => {
                imap.search(['UNSEEN'], (err, results) => {
                    if (err) showError(err);
                    results.forEach((result) => {
                        const f = imap.fetch(result, { bodies: '' });
                        f.on('message', (msg) => {
                            msg.on('body', (stream, info) => {
                                let buffer = '';
                                stream.on('data', (chunk) => {
                                    buffer += chunk.toString('utf8');
                                });
                                stream.on('end', () => {
                                    showMessage(Imap.parseHeader(buffer));
                                });
                            });
                        });
                    });
                });
            });
        });
    });

    imap.once('error', showError);
    imap.once('end', () => {
        console.log('Connection ended');
    });

    return imap;
}
When I call it in index.js, I get this error:
/home/.../node_modules/imap/lib/Connection.js:1804
return str.replace(RE_BACKSLASH, '\\\\').replace(RE_DBLQUOTE, '\\"');
^
TypeError: str.replace is not a function
at escape
I'm probably passing a bad argument during the IMAP connection, but I don't really know which one and I can't find anything online. Help, please!
By the way, I had a certificate issue; that's why I added the tlsOptions property.
Here is the full traceback of the error:
/home/lerou/B-DEV-500-COT-5-2-area-segnon.gnonlonfoun/Back-End/node_modules/imap/lib/Connection.js:1804
return str.replace(RE_BACKSLASH, '\\\\').replace(RE_DBLQUOTE, '\\"');
^
TypeError: str.replace is not a function
at escape (/home/lerou/B-DEV-500-COT-5-2-area-/Back-End/node_modules/imap/lib/Connection.js:1804:14)
at Connection.<anonymous> (/home/lerou/B-DEV-500-COT-5-2-area-/Back-End/node_modules/imap/lib/Connection.js:1672:24)
at Connection._resTagged (/home/lerou/B-DEV-500-COT-5-2-area-/Back-End/node_modules/imap/lib/Connection.js:1535:22)
at Parser.<anonymous> (/home/lerou/B-DEV-500-COT-5-2-area-/Back-End/node_modules/imap/lib/Connection.js:194:10)
at Parser.emit (node:events:512:28)
at Parser._resTagged (/home/lerou/B-DEV-500-COT-5-2-area-/Back-End/node_modules/imap/lib/Parser.js:175:10)
at Parser._parse (/home/lerou/B-DEV-500-COT-5-2-area-/Back-End/node_modules/imap/lib/Parser.js:139:16)
at Parser._tryread (/home/lerou/B-DEV-500-COT-5-2-area-/Back-End/node_modules/imap/lib/Parser.js:82:15)
at Parser._cbReadable (/home/lerou/B-DEV-500-COT-5-2-area-/Back-End/node_modules/imap/lib/Parser.js:53:12)
at TLSSocket.emit (node:events:512:28)
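For what it's worth, the argument that is most likely not a string here is the xoauth2 option: oauth2Client.getAccessToken() returns a Promise, so the imap library ends up calling str.replace on a Promise object. Below is only a rough sketch of resolving the token first, assuming node-imap's xoauth2 option (which expects a base64-encoded SASL XOAUTH2 string) and the same oauth2Client as above; newMessages would have to become async:
async function newMessages(/*onNewEmailCallback*/) {
    // Resolve the access token before handing anything to Imap.
    const { token } = await oauth2Client.getAccessToken();
    const user = 'mymail#gmail.com'; // same placeholder address as in the question
    // SASL XOAUTH2 format: "user=<user>\x01auth=Bearer <token>\x01\x01", base64-encoded.
    const xoauth2 = Buffer.from(`user=${user}\x01auth=Bearer ${token}\x01\x01`).toString('base64');

    const imap = new Imap({
        user,
        xoauth2, // now a string, not a Promise
        host: 'imap.gmail.com',
        port: 993,
        tls: true,
        tlsOptions: { rejectUnauthorized: false }
    });

    // ...the same event handlers as in the question...
    return imap;
}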
I was following a tutorial which has since become outdated. I went back to read the docs and was able to solve the "ReferenceError: You are trying to import a file after the Jest environment has been torn down". The data is populated to the database, but something interesting is happening: most of the tests are failing even though the data is there in the database. The output is in the image below.
beforeEach(async () => {
    jest.resetAllMocks();
    await dummySystem.populateData(dummySystem.allocateTasks);
}, 60000);

jest.useFakeTimers();
jest.spyOn(global, "setTimeout");

async function populateData(callback) {
    User.deleteMany({});
    Task.deleteMany({});
    dummyUserData.forEach((user) => {
        let start = 0;
        let end = 8;
        const userId = mongoose.Types.ObjectId();
        const data = new User({
            ...user,
            _id: userId,
            tokens: [{ flask: jwt.sign({ _id: userId }, process.env.JWT_STRING) }],
        });
        data.save();
        callback(userId, start, end);
        start += 7;
        end += 8;
    });
}

function allocateTasks(id, slice_start, slice_end) {
    if (slice_end < dummyTaskData.length) {
        dummyTaskData.slice(slice_start, slice_end).forEach((task) => {
            const data = new Task({ ...task, crafter: id });
            setTimeout(() => data.save(), 5000);
        });
    }
}

test("should create task for authenticated user", function (done) {
    let token = {};
    let crafter;
    User.findOne({ email: "ebagei#consolate.us" }).then((data) => {
        token = data.tokens[0].flask;
        crafter = data._id;
    });
    request(server)
        .post("/api/tasks")
        .set("Authorization", `Bearer ${token}`)
        .send({ description: "Apply for driving license test", crafter: crafter })
        .expect(201)
        .then((task) => {
            Task.findById(task._id).expect(task).not.toBeNull();
        });
    done();
});

test("should load all task for authenticated user", function (done) {
    let token = {};
    User.findOne({ email: "ebagei#consolate.us" }).then(
        (data) => (token = data.tokens[0].flask)
    );
    request(server)
        .get("/api/tasks")
        .set("Authorization", `Bearer ${token}`)
        .expect(200)
        .then((data) => expect(data.lenght).toEqual(7));
    done();
});
The issue here is that I am looping through a JSON file; each loop takes 5 seconds and it loops 7 times. If you do the math, that gives you 35 seconds. I have delayed the beforeEach() function by 1 minute, and I believe that is ample time to populate the database. The data is present in the database, but all the tests are failing.
I have tried to find out how to use jest.setTimeout() or find an example of it, but I found nothing for my use case.
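Two things stand out here. With jest.useFakeTimers() active, the 5-second setTimeout(() => data.save(), 5000) calls in allocateTasks never fire unless the timers are advanced, and both tests call done() immediately, before the findOne and request promises have resolved, so the assertions never actually gate the test. As a hedged sketch only (assuming supertest's request and the models from the question), the second test rewritten with async/await, plus jest.setTimeout to raise the per-file timeout:
jest.setTimeout(60000); // applies to every test in this file

test("should load all task for authenticated user", async () => {
    // Resolve the user before firing the request, instead of racing it.
    const user = await User.findOne({ email: "ebagei#consolate.us" });
    const token = user.tokens[0].flask;

    const res = await request(server)
        .get("/api/tasks")
        .set("Authorization", `Bearer ${token}`)
        .expect(200);

    // supertest puts the parsed payload on res.body
    expect(res.body.length).toEqual(7); // 7 is the count the question expects
});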
I'm having issues with the 'request' function and I can't find a way to fix this. Every time I try to run the server and get the results from the database, I get this error message instead:
return pool.request().query('SELECT * FROM Products', function (err, result) {
^
TypeError: Cannot read properties of undefined (reading 'request')
Is there something missing that I should have in my JS files?
connection.js
const sql = require('mssql/msnodesqlv8');

const dbSettings = {
    user: '******',
    password: '******',
    server: "DESKTOP-*****\\SQLEXPRESS",
    database: 'webstore',
    driver: 'msnodesqlv8',
    options: {
        encrypt: true, // for azure
    }
};

export async function getConnection() {
    try {
        const pool = await sql.connect(dbSettings);
        return pool;
    } catch (error) {
        console.log(error);
    }
}
products.controller.js
import { getConnection } from '../database/connection';

export const getProducts = async (req, res) => {
    const pool = await getConnection();
    const result = await pool.request().query('SELECT * FROM Products');
    console.log(result);
    res.json('products');
};
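One likely cause of the "reading 'request'" TypeError, given the code above: if sql.connect(dbSettings) throws, the catch block only logs the error and getConnection() resolves to undefined, so pool.request() is called on undefined in the controller. A small sketch that surfaces the real connection error instead (same getConnection, just re-throwing):
export async function getConnection() {
    try {
        return await sql.connect(dbSettings);
    } catch (error) {
        console.error('Database connection failed:', error);
        throw error; // let the controller see the actual failure instead of undefined
    }
}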
I have a Node.js project with the structure below. I need to insert a record into the clients table and return the last inserted ID from that table so I can use it in a second table, but I need to wait until the insert into clients is completed before inserting the client ID into my second table. I'm trying to use async/await, but I always get a null value.
My MySQL connection: db.model.js
const config = require('config');
const mysql = require("mysql");
const connection = mysql.createConnection({
    host: config.get('mysql.host'),
    user: config.get('mysql.user'),
    password: config.get('mysql.password'),
    database: config.get('mysql.database')
});

connection.connect(function (err) {
    if (err) {
        console.error(`MySQL Connection Error: ${err.stack}`);
        return;
    }
    console.log(`MySQL connected successfully!`);
});
module.exports = connection;
My CLIENT model
const mysql = require("./db.model");

const Client = function (client) {
    this.login = client.login;
};

Client.create = (newClient, result) => {
    mysql.query("INSERT INTO clients SET ?", newClient, (err, res) => {
        if (err) {
            console.log("error: ", err);
            result(err, null);
            return;
        }
        result(null, {
            id: res.insertId,
            ...newClient
        });
    });
};

module.exports = Client;
This is the client controller (I'm trying to use async/await here):
const Client = require('../models/client.model');

exports.create = (login) => {
    const client = new Client({
        login: login
    });
    Client.create(client, async (err, data) => {
        if (!err) {
            return await data.id;
        } else {
            return false;
        }
    });
};
And this is another controller, where I want to use methods from my client controller:
const ClientController = require('../controllers/client.controller');
...
utils.connect()
    .then(clt => clt.sub.create(data))
    .then((sub) => {
        let lastInsertedId = ClientController.create(sub.login);
        // lastInsertedId is always null here,
        // but I know ClientController returns a value after some time.
        // The method below will fail because lastInsertedId cannot be null.
        TransactionController.transactionCreate(lastInsertedId,
            sub.id,
            sub.param);
    })
    .catch(error => res.send(error.response.errors))
Any help is appreciated.
File to create database connection
const config = require('config');
const mysql = require('mysql2');
const bluebird = require('bluebird');
const dbConf = {
    host: config.dbhost,
    user: config.dbuser,
    password: config.dbpassword,
    database: config.database,
    Promise: bluebird
};

class Database {
    static async getDBConnection() {
        try {
            if (!this.db) {
                // to test if credentials are correct
                await mysql.createConnection(dbConf);
                const pool = mysql.createPool(dbConf);
                // now get a Promise-wrapped instance of that pool
                const promisePool = pool.promise();
                this.db = promisePool;
            }
            return this.db;
        } catch (err) {
            console.log('Error in database connection');
            console.log(err.errno || err);
        }
    }
}

module.exports = Database;
Use the connection to execute your native query:
const database = require('./database');

// (run this inside an async function, since it uses await)
let query = 'select * from users';
let conn = await database.getDBConnection();
let [data, fields] = await conn.query(query);
So I'm still using only the npm mysql package, but now I have transformed all my queries into promises like the one below, so I can just wait until the queries are completed.
const create = (idCliente, transactionId, amount, status) => {
    const sql = "INSERT INTO transactions SET ?";
    const params = {
        id_cliente: idCliente,
        transaction_id: transactionId,
        amount: amount,
        status: status
    };
    return new Promise((resolve, reject) => {
        pool.query(sql, params, (err, result) => {
            if (err) {
                return reject(err);
            }
            resolve(result);
        });
    });
};
Then I use it like this:
create(params)
    .then((result) => {
        // call more queries here if needed
    })
    .catch((err) => { });
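To tie this back to the original goal (insert into clients, then use the new ID for the transaction), the same promise-wrapping approach lets you await the insertId before the second insert. This is only a sketch, where createClient is a hypothetical wrapper built exactly like create() above but around "INSERT INTO clients SET ?":
async function registerClientTransaction(login, transactionId, amount, status) {
    const clientResult = await createClient({ login });           // hypothetical promise wrapper, same pattern as create()
    const lastInsertedId = clientResult.insertId;                  // mysql exposes the new row's ID on the insert result
    return create(lastInsertedId, transactionId, amount, status); // the ID is guaranteed to exist by now
}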
You can use the sync-sql package from npm to run your queries synchronously.
https://www.npmjs.com/package/sync-sql
Here is an example of it:
const express = require('express');
const mysql = require('mysql');
const app = express();
var syncSql = require('sync-sql');

// Create connection
const connect = {
    host: 'localhost',
    user: 'root',
    password: '',
    database: 'ddd_test'
};

const db = mysql.createConnection(connect);

db.connect((err) => {
    if (err) {
        throw err;
    }
    console.log("Connected");
});

function getDbData(query) {
    return syncSql.mysql(connect, query).data.rows;
}

app.get("/getData", async (req, res, next) => {
    let sql = 'SELECT * from registration';
    res.json({
        data: getDbData(sql)
    });
});

app.listen(3000, () => {
    console.log('App listening on port 3000!');
});
I'm connecting with SQL Server using the Node mssql package in my Electron app.
I can't create a REST API.
It works fine, although I have concerns:
it doesn't close the SQL connection after a query
it makes a new DB connection for each query
Is that OK?
How it works:
app.js makes 2 queries and logs the results
sql.js connects with the DB
// app.js
const { getUser, getUsers } = require('./sql');

getUser(10).then((result) => {
    console.dir(result);
});

getUsers().then((result) => {
    console.dir(result);
});
// sql.js
const sql = require("mssql");

// DB credentials
const config = {
    user: 'myuser',
    password: '123',
    server: 'myserver',
    database: 'mydb',
    options: {
        encrypt: true
    }
};

// Creates a new connection pool for each query
function connectDB() {
    const pool = new sql.ConnectionPool(config);
    return pool.connect()
        .then(pool => {
            console.log('Connected to database');
            // returns a Promise
            return pool;
        })
        .catch(err => console.log('Database connection failed!', err));
}

// 1st query
function getUser(id) {
    const connection = connectDB();
    return connection
        .then(pool => {
            return pool.request()
                .input('PK_ID', sql.Int, parseInt(id))
                .execute('[uspGetUser]');
        })
        .then(result => {
            return result.recordset[0];
        })
        .catch(err => {
            console.log('Query failed!', err);
        });
}

// 2nd query
function getUsers() {
    const DB = connectDB();
    return DB
        .then(pool => {
            return pool.request()
                .execute('[uspGetUsers]');
        })
        .then(result => {
            return result.recordset[0];
        })
        .catch(err => {
            console.log('Query failed!', err);
        });
}

module.exports = {
    getUser,
    getUsers
};
No, you don't need to close (i.e. release) a connection back to the connection pool after every query; the library already does that for you.
The pool.close() method will close all the connections in the pool. Technically, you should only do that when you're terminating your application, not after every query, since creating a new pool every time creates quite an overhead for your application.
I had the same question myself, so I looked up the project's source code.
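Building on that, one way to avoid creating a pool per query is to create a single pool once and share it from the module. A rough sketch only, reusing the config shape from the question:
// sql.js (sketch)
const sql = require('mssql');

const config = { /* same credentials/options as in the question */ };

// One pool for the whole app, created the first time this module is required.
const poolPromise = new sql.ConnectionPool(config)
    .connect()
    .then(pool => {
        console.log('Connected to database');
        return pool;
    });

async function getUser(id) {
    const pool = await poolPromise; // reuses the same pool on every call
    const result = await pool.request()
        .input('PK_ID', sql.Int, parseInt(id, 10))
        .execute('[uspGetUser]');
    return result.recordset[0];
}

module.exports = { getUser, poolPromise };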
Solved!
To close the DB connection and return the results, we can use a finally block and async/await functions.
The finally block lets you execute code after try and catch, regardless of the result.
// sql.js
const sql = require("mssql");

// DB credentials
const config = {
    user: 'myuser',
    password: '123',
    server: 'myserver',
    database: 'mydb',
    options: {
        encrypt: true
    }
};

async function connectDB() {
    const pool = new sql.ConnectionPool(config);
    try {
        await pool.connect();
        console.log('Connected to database');
        return pool;
    } catch (err) {
        console.log('Database connection failed!', err);
        return err;
    }
}

async function getAll() {
    const DB = await connectDB();
    try {
        const result = await DB.request()
            .query('select * from [your_table]');
        return result.recordset;
    } catch (err) {
        console.log('Error querying database', err);
        return err;
    } finally {
        DB.close();
    }
}

async function execute() {
    let result = await getAll();
    console.dir(JSON.stringify(result));
    return result;
}

execute();