I want my tests to run concurrently, so I am using it.concurrent in Jest.
The problem is that the mock data is not created for each it.concurrent test when I run the suite. I also want the tests to share that data so I can check whether a deadlock occurs. By the way, I am using PostgreSQL and Knex.js here. Thanks in advance.
describe('#Concurrent tests', () => {
const knex: Knex = container.get(Types.Knex);
const orderDataSource: IOrderDataSource = container.get(Types.OrderDataSource);
const branchDataSource: IBranchDataSource = container.get(Types.BranchDataSource);
const brandDataSource: IBrandDataSource = container.get(Types.BrandDataSource);
const variantDataSource: IVariantDataSource = container.get(Types.VariantDataSource);
const categoryDataSource: ICategoryDataSource = container.get(Types.CategoryDataSource);
const productDataSource: IProductDataSource = container.get(Types.ProductDataSource);
const branchVariantDataSource: IBranchVariantDataSource = container.get(Types.BranchVariantDataSource);
const basketDataSource: IBasketDataSource = container.get(Types.BasketDataSource);
const mockBrand2 = Factory.build('brand',{
id: 'asdf',
name: 'Brand 2',
});
const mockBranch2 = Factory.build('branch',{
id: 'branch-2',
brandId: mockBrand2.id,
}) as IBranch;
const mockCategory2 = Factory.build('category.activated', {
id:'category-2',
brandId: mockBrand2.id,
}) as ICategory;
const mockProduct2 = Factory.build('product', {
id: 'product-2',
categoryId: mockCategory2.id,
}) as IProduct;
const mockVariant2 = Factory.build('variant', {
id: 'variant-2',
productId: mockProduct2.id,
}) as IVariant;
const mockBranchVariant2 = Factory.build('branchVariant', {
id: 'branch-variant-2',
branchId: mockBranch2.id,
variantId: mockVariant2.id,
stock: 100,
});
const mockBasket = Factory.build('basket', {
id: 'basket-1',
branchId: mockBranch2.id,
orderItems: [
{
variantId: mockVariant2.id,
quantity: 10,
}
]
}) as IBasket;
const mockBasket2 = Factory.build('basket-3',{
id: 'basket-2',
branchId: mockBranch2.id,
orderItems: [
{
variantId: mockVariant2.id,
quantity: 20,
}
]
}) as IBasket;
beforeEach(async () => {
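// seed the mock rows that both it.concurrent tests below rely on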
await brandDataSource.create(mockBrand2);
await categoryDataSource.create(mockCategory2);
await branchDataSource.create(mockBranch2);
await productDataSource.create(mockProduct2);
await variantDataSource.create(mockVariant2);
await branchVariantDataSource.create(mockBranchVariant2);
await basketDataSource.create(mockBasket);
await basketDataSource.create(mockBasket2);
});
afterEach(async () => {
await brandDataSource.truncate();
await branchDataSource.truncate();
await productDataSource.truncate();
await variantDataSource.truncate();
await categoryDataSource.truncate();
await branchVariantDataSource.truncate();
await basketDataSource.truncate();
await orderDataSource.truncate();
});
it.concurrent('creates multiple baskets', async () => {
const params = {
items: [
{
variantId: mockVariant2.id,
},
],
address: {
latitude: 14.6930497078559,
longitude: 120.99470307301551,
addressDetails: '713 Gold Eagle St., Gen. T. De Leon, Valenzuela City',
},
mobileNum: '09198003996',
paymentMethod: 'cod',
deliveryId: 'iahdsfiuahdsnfiuchnaiuhiudfhiahfhalifnhcasf',
shippingFee: 150.0,
customerName: 'David Tan',
email: 'd.tan@gmail.com',
};
const event = {
requestContext: {
authorizer: {
lambda: {
principalId: 'lkjfi123nOPIUJnss',
}
}
},
pathParameters: {
branchId: mockBranch2.id,
},
body: JSON.stringify(params),
isTesting: true,
};
const res = await handler(event);
const body = JSON.parse(res.body);
});
it.concurrent('creates multiple baskets-2', async () => {
const params = {
items: [
{
variantId: mockVariant2.id,
},
],
address: {
latitude: 14.6930497078559,
longitude: 120.99470307301551,
addressDetails: '713 Gold Eagle St., Gen. T. De Leon, Valenzuela City',
},
mobileNum: '09198003996',
paymentMethod: 'cod',
deliveryId: 'iahdsfiuahdsnfiuchnaiuhiudfhiahfhalifnhcasf',
shippingFee: 150.0,
customerName: 'Mark Uy',
email: 'mark@gmail.com',
};
const event = {
requestContext: {
authorizer: {
lambda: {
principalId: 'AsdioSDOIj123kj',
}
}
},
pathParameters: {
branchId: mockBranch2.id,
},
body: JSON.stringify(params),
isTesting: true,
};
const res = await handler(event);
const body = JSON.parse(res.body);
});
});
In my application I am trying to build a route for creating orders. The flow should go like this:
1- Reserve the products for the user
2- Empty the user's cart
3- Take the payment
However, if any one of these operations fails, the database should return to its original state, as if no documents had been updated.
I'm using MongoDB.
This is what I tried:
export default async function order(userId) {
const User = mongoose.model('user');
const Product = mongoose.model('product');
const Order = mongoose.model('order');
const session = await mongoose.startSession();
session.startTransaction();
try {
const user = await User.findById(userId).session(session);
// reserve the product for the user
const results = await Product.bulkWrite(
user.cart.map((item) => ({
updateOne: {
filter: {
_id: item.product,
'combinations.size': item.size,
'combinations.color': item.color,
'combinations.qty': { $gte: item.qty },
},
update: {
$inc: { 'combinations.$.qty': -item.qty },
},
},
})),
{ session }
);
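// abort the whole transaction unless every cart item could actually be reserved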
if (results.nModified !== user.cart.length) {
throw new Error('Not all products available, transaction aborted');
}
// empty user's cart
await User.findByIdAndUpdate(userId, { cart: [] }).session(session);
// take payment from the user
const payment = (successed) => {
if (successed) return { id: 'charge id' };
throw new Error('payment failed');
};
const charge = payment(true);
const productsInCart = await Product.find({
_id: { $in: user.cart.map((item) => item.product.toString()) },
});
// create the order
await Order.create([
{
user: userId,
products: user.cart.map((item) => ({
...item,
price: productsInCart.find(
(product) => product._id.toString() === item.product.toString()
).price.curr1,
})),
currency: 'curr1',
status: 'placed',
chargeId: charge.id,
},
]);
await session.commitTransaction();
} catch (err) {
console.log(err);
await session.abortTransaction();
} finally {
await session.endSession();
}
}
However, when testing it I get this error.
These are my models:
User:
import { Schema, model } from 'mongoose';
const UserSchema = new Schema({
email: String,
cart: [
{
product: { type: Schema.Types.ObjectId, ref: 'product' },
color: String,
size: String,
qty: Number,
},
],
orders: [{ type: Schema.Types.ObjectId, ref: 'order' }],
});
model('user', UserSchema);
Product:
import { Schema, model } from 'mongoose';
const ProductSchema = new Schema({
title: String,
price: {
curr1: Number, // price in a region
curr2: Number, // price in another region
},
combinations: [
{
size: String,
color: String,
qty: Number,
},
],
});
model('product', ProductSchema);
Order:
import { Schema, model } from 'mongoose';
const OrderSchema = new Schema({
user: { type: Schema.Types.ObjectId, ref: 'user' },
products: [
{
product: { type: Schema.Types.ObjectId, ref: 'product' },
size: String,
color: String,
qty: Number,
price: Number,
},
],
currency: String,
status: String,
chargeId: String,
});
model('order', OrderSchema);
And my test:
import async from 'async';
import mongoose from 'mongoose';
import order from '../src/order.js';
const User = mongoose.model('user');
const Order = mongoose.model('order');
const Product = mongoose.model('product');
describe('Race condition for orders', async () => {
let products;
let users;
beforeEach(async () => {
products = await Product.insertMany([
{
title: 'test product 1',
price: { curr1: 10, curr2: 10 },
combinations: [
{
size: 'large',
color: 'black',
qty: 1,
},
{
size: 'small',
color: 'red',
qty: 2,
},
],
},
{
title: 'test product 2',
price: { curr1: 10, curr2: 10 },
combinations: [
{
size: 'medium',
color: 'yellow',
qty: 1,
},
],
},
]);
users = await User.insertMany([
{
email: 'test@test.test',
cart: products.map((product) => ({
product: product._id.toString(),
size: product.combinations[0].size,
color: product.combinations[0].color,
qty: 1,
})),
},
{
email: 'test2@test.test',
cart: products.map((product) => ({
product: product._id.toString(),
size: product.combinations[0].size,
color: product.combinations[0].color,
qty: 1,
})),
},
]);
});
it('calls order twice at the same time', async () => {
await new Promise((resolve, reject) =>
async.parallel(
[
async () => {
await order(users[0]._id.toString());
},
async () => {
await order(users[1]._id.toString());
},
],
(err, res) => {
if (err) return reject(err);
resolve(res);
}
)
);
console.log(await Order.find({}));
});
});
Why isn't map working? I've spent the last few days trying to figure out why it isn't recognizing my arrays. My goal is to use Stripe to add individual items to the cart from my shop.html.
The problem is that when I click a product to add it to the cart and check out, the Stripe session shows all the products, including their prices and the total, instead of just the item I selected.
server.js
// require("dotenv").config()
// This is your test secret API key.
const stripe = require('stripe')('sk_test_XXXXXXX')
const express = require('express')
const app = express()
app.use(express.static('public'))
app.use(express.json())
const YOUR_DOMAIN = 'http://localhost:4242';
const storeItems = new Map([
[1, { priceId: 'price_XXXXXX', name: "Black Sweatsuit"}],
[2, { priceId: 'price_XXXXXX', name: "Grey Sweatsuit"}] ,
[3, { priceId: 'price_XXXXXX', name: "Red Sweatsuit"}],
[4, { priceId: 'price_XXXXXX', name: "Blue Sweatsuit"}],
[5, { priceId: 'price_XXXXXX', name: "Black T-Shirt"}],
[6, { priceId: 'price_XXXXXX', name: "Blue T-Shirt"}],
[7, { priceId: 'price_XXXXXX', name: "Purple T-Shirt"}],
])
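// maps the numeric item ids sent from the client (checkout.js) to the matching Stripe price info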
app.post('/create-checkout-session', async (req, res) => {
try {
const session = await stripe.checkout.sessions.create({
payment_method_types: ['card'],
mode: 'payment',
line_item: req.body.items.map(item => {
const storeItem = storeItems.get(item.id)
return {
price_data: {
currency:'usd',
product_data: {
name: storeItem.name
},
unit_amount: storeItem.priceId
},
quantity: item.quantity
}
}),
success_url:`${YOUR_DOMAIN}/success.html`,
cancel_url: `${YOUR_DOMAIN}/cancel.html`,
automatic_tax: {enabled: true},
})
res.json({ url: session.url})
} catch (e) {
res.status(500).json({ error: e.message })
}
})
app.listen(4242, () => console.log('Running on port 4242'));
checkout.js
const button = document.getElementById("checkout-button")
button.addEventListener("click", () => {
fetch('/create-checkout-session', {
method: 'POST',
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify({
items: [
{ id: 1, quantity: 1 },
{ id: 2, quantity: 1 },
{ id: 3, quantity: 1 },
{ id: 4, quantity: 1 },
{ id: 5, quantity: 1 },
{ id: 6, quantity: 1 },
{ id: 7, quantity: 1 },
],
}),
})
.then(res => {
if (res.ok) return res.json()
return res.json().then(json => Promise.reject(json))
})
.then(({ url }) => {
window.location = url
})
.catch(e => {
console.error(e.error)
})
})
terminal
{"error":"Cannot read properties of undefined (reading 'map')"}
I have two arrays of objects which look something like this:
const users = [
{
status: 'failed',
actionName: 'blabla',
userId: 1,
},
{
status: 'success',
actionName: 'blablabla',
userId: 2,
},
];
The second one:
const usersDetails = [
{
name: 'Joseph',
id: 1,
},
{
name: 'Andrew',
id: 2,
},
];
I want to check if userId is equal to id and, if so, push the name from usersDetails into the matching users object, so the output would look like this:
const users = [
{
status: 'failed',
actionName: 'blabla',
userId: 1,
name: 'Joseph'
},
{
status: 'success',
actionName: 'blablabla',
userId: 2,
name: 'Andrew'
}];
The easiest solution would be to do:
const users = [
{
status: 'failed',
actionName: 'blabla',
userId: 1,
},
{
status: 'success',
actionName: 'blablabla',
userId: 2,
},
];
const usersDetails = [
{
name: 'Joseph',
id: 1,
},
{
name: 'Andrew',
id: 2,
},
];
const getAllUserInfo = () => users.map(user => {
const userExtraInfo = usersDetails.find(details => details.id === user.userId)
const fullUser = {...user, ...userExtraInfo}
delete fullUser.id
return fullUser
})
console.log(getAllUserInfo())
const users = [ { status: 'failed', actionName: 'blabla', userId: 1, }, { status: 'success', actionName: 'blablabla', userId: 2, }, ];
const usersDetails = [ { name: 'Joseph', id: 1, }, { name: 'Andrew', id: 2, }, ];
const newUsers = users.map(user => {
user.name = usersDetails.find(u => u.id === user.userId)?.name;
return user;
});
console.log(newUsers);
You can try this code:
let result = users.map(user => ({...user, ...usersDetails.find(userDetail => userDetail.id == user.userId) }));
console.log(result);
If you only want to get the name from the second array:
let result = users.map(user => ({...user, 'name': usersDetails.find(userDetail => userDetail.id == user.userId).name }));
If you want to get all the properties except id:
let result = users.map(user => {
let result = {...user, ...usersDetails.find(userDetail => userDetail.id == user.userId) }
delete result.id;
return result;
});
Hope this answer works for you:
const users = [
{
status: "failed",
actionName: "blabla",
userId: 1,
},
{
status: "success",
actionName: "blablabla",
userId: 2,
},
];
const usersDetails = [
{
name: "Joseph",
id: 1,
},
{
name: "Andrew",
id: 2,
},
];
users.map((e) => {
usersDetails.find((_e) => {
if (e.userId === _e.id) {
e.name = _e.name;
}
});
});
console.log(users);
You can do something like this using a single loop:
const users = [
{
status: "failed",
actionName: "blabla",
userId: 1,
},
{
status: "success",
actionName: "blablabla",
userId: 2,
},
];
const usersDetails = [
{
name: "Joseph",
id: 1,
},
{
name: "Andrew",
id: 2,
},
];
const result = Object.values([...users, ...usersDetails].reduce((res, {userId, id,...item}) => {
const key = id || userId
return {
...res,
[key]: {...(res[key] || {userId: key}), ...item}
}
}, {}))
console.log(result);
const users = [{
status: 'failed',
actionName: 'blabla',
userId: 1,
},
{
status: 'success',
actionName: 'blablabla',
userId: 2,
},
];
const usersDetails = [{
name: 'Joseph',
id: 1,
},
{
name: 'Andrew',
id: 2,
},
];
users.forEach(each => {
const found = usersDetails.find(detail => detail.id === each.userId);
if (found) {
each.name = found.name;
}
});
console.log(users);
I've been looking all over for an answer to this and I've been banging my head against the wall. I wrote a cursor-based pagination example that works well with GraphQL for books, and I thought I would do the same thing with authors, but the only way I can see to do that is to duplicate everything. The root query contains quite a long chunk of code handling the pagination, and I would hate to repeat all of it for the authors endpoint, but I can't seem to find a way to reuse the code.
Here is the code:
const express = require('express')
const { graphqlHTTP } = require('express-graphql')
const {
GraphQLSchema,
GraphQLObjectType,
GraphQLString,
GraphQLList,
GraphQLInt,
GraphQLNonNull
} = require('graphql')
const {
PageType,
convertNodeToCursor,
convertCursorToNodeId
} = require('./pagination')
const app = express()
const authors = [
{ id: 1, name: "Author 1"},
{ id: 2, name: "Author 2"},
{ id: 3, name: "Author 3"}
]
const books = [
{ id: 1, title: "Book 1", authorId: 1 },
{ id: 2, title: "Book 2", authorId: 1 },
{ id: 3, title: "Book 3", authorId: 1 },
{ id: 4, title: "Book 4", authorId: 2 },
{ id: 5, title: "Book 5", authorId: 2 },
{ id: 6, title: "Book 6", authorId: 2 },
{ id: 7, title: "Book 7", authorId: 3 },
{ id: 8, title: "Book 8", authorId: 3 },
{ id: 9, title: "Book 9", authorId: 3 }
]
const Book = new GraphQLObjectType({
name: 'Book',
description: 'this is a book',
fields: () => ({
id: { type: GraphQLNonNull(GraphQLInt) },
title: { type: GraphQLNonNull(GraphQLString) },
authorId: { type: GraphQLNonNull(GraphQLInt) },
author: {
type: Author,
resolve: ({authorId}) => {
return authors.find(author => author.id === authorId)
}
}
})
})
const Author = new GraphQLObjectType({
name: 'Author',
description: 'this represents the author of a book',
fields: () => ({
id: { type: GraphQLNonNull(GraphQLInt) },
name: { type: GraphQLNonNull(GraphQLString) },
books: {
type: GraphQLList(Book),
resolve: ({id}) => {
return books.filter(book => book.authorId === id)
}
}
})
})
const RootQuery = new GraphQLObjectType({
name: 'RootQueryType',
description: 'this is the root query',
fields: () => ({
book: {
type: Book,
description: 'a single book',
args: {
id: { type: GraphQLInt }
},
resolve: (_, { id }) => {
return books.find(book => book.id === id)
}
},
author: {
type: Author,
description: 'a single author',
args: {
id: { type: GraphQLInt },
},
resolve: (_, { id }) => {
return authors.find(author => author.id === id)
}
},
books: {
type: PageType(Book),
description: 'a list of books',
args: {
first: { type: GraphQLInt },
afterCursor: { type: GraphQLString }
},
resolve: (_, { first, afterCursor }) => {
let afterIndex = 0
if (typeof afterCursor === 'string') {
let nodeId = convertCursorToNodeId(afterCursor)
let nodeIndex = books.findIndex(book => book.id === nodeId)
if (nodeIndex >= 0) {
afterIndex = nodeIndex + 1
}
}
const slicedData = books.slice(afterIndex, afterIndex + first)
console.log('sliced data: ', slicedData)
const edges = slicedData.map(node => ({
node,
cursor: convertNodeToCursor(node)
}))
let startCursor = null
let endCursor = null
if (edges.length > 0) {
startCursor = convertNodeToCursor(edges[0].node)
endCursor = convertNodeToCursor(edges[edges.length - 1].node)
}
let hasNextPage = books.length > afterIndex + first
return {
totalCount: books.length,
edges,
pageInfo: {
startCursor,
endCursor,
hasNextPage
}
}
}
}
})
})
const schema = new GraphQLSchema({
query: RootQuery
})
app.use('/graphql', graphqlHTTP({
schema,
graphiql: true
}))
app.listen(3000, () => console.log('app running at http://localhost:3000/graphql'))
And I handle the pagination in another file here:
const {
GraphQLString,
GraphQLInt,
GraphQLBoolean,
GraphQLObjectType,
GraphQLList,
} = require('graphql')
const Edge = (itemType) => {
return new GraphQLObjectType({
name: 'EdgeType',
fields: () => ({
node: { type: itemType },
cursor: { type: GraphQLString }
})
})
}
const PageInfo = new GraphQLObjectType({
name: 'PageInfoType',
fields: () => ({
startCursor: { type: GraphQLString },
endCursor: { type: GraphQLString },
hasNextPage: { type: GraphQLBoolean }
})
})
const PageType = (itemType) => {
return new GraphQLObjectType({
name: 'PageType',
fields: () => ({
totalCount: { type: GraphQLInt },
edges: { type: new GraphQLList(Edge(itemType)) },
pageInfo: { type: PageInfo }
})
})
}
const convertNodeToCursor = (node) => {
// Encoding the cursor value to Base 64 as suggested in GraphQL documentation
return Buffer.from((node.id).toString()).toString('base64')
}
const convertCursorToNodeId = (cursor) => {
// Decoding the cursor value from Base 64 to integer
return parseInt(Buffer.from(cursor, 'base64').toString('ascii'))
}
module.exports = {
PageType,
convertNodeToCursor,
convertCursorToNodeId
}
Now, if I copy and paste the books endpoint, change it to authors, and change the type to PageType(Author), I get another error:
Schema must contain uniquely named types but contains multiple types named "PageType".
So this clearly isn't a solution either.
You cannot have one EdgeType that contains Authors and another EdgeType that contains Books. Instead, you will need one AuthorEdge and one BookEdge type.
The same holds for the PageType - there can't be two different types with different fields but the same name.
The solution is relatively simple though: since you are already generating these types dynamically in a function, name them dynamically as well:
const Edge = (itemType) => {
return new GraphQLObjectType({
name: itemType.name + 'Edge',
// ^^^^^^^^^^^^^^^^^^^^^^
fields: () => ({
node: { type: itemType },
cursor: { type: GraphQLString }
})
})
}
const PageInfo = new GraphQLObjectType({
name: 'PageInfo',
fields: () => ({
startCursor: { type: GraphQLString },
endCursor: { type: GraphQLString },
hasNextPage: { type: GraphQLBoolean }
})
})
const PageType = (itemType) => {
return new GraphQLObjectType({
name: itemType.name + 'sPage',
// ^^^^^^^^^^^^^^^^^^^^^^^
fields: () => ({
totalCount: { type: GraphQLInt },
edges: { type: new GraphQLList(Edge(itemType)) },
pageInfo: { type: PageInfo }
})
})
}
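With the dynamic names in place, PageType(Book) and PageType(Author) can coexist in the same schema, so the only remaining duplication is the resolver logic itself. Below is a minimal sketch of how that logic could be pulled into a shared helper, assuming the same in-memory arrays and the convertNodeToCursor / convertCursorToNodeId helpers from the question are in scope; the paginate name is illustrative, not part of the original code.
// a resolver factory that paginates any in-memory array using the id-based cursors above
const paginate = (data) => (_, { first, afterCursor }) => {
  let afterIndex = 0
  if (typeof afterCursor === 'string') {
    const nodeId = convertCursorToNodeId(afterCursor)
    const nodeIndex = data.findIndex(node => node.id === nodeId)
    if (nodeIndex >= 0) {
      afterIndex = nodeIndex + 1
    }
  }
  const slicedData = data.slice(afterIndex, afterIndex + first)
  const edges = slicedData.map(node => ({
    node,
    cursor: convertNodeToCursor(node)
  }))
  return {
    totalCount: data.length,
    edges,
    pageInfo: {
      startCursor: edges.length > 0 ? convertNodeToCursor(edges[0].node) : null,
      endCursor: edges.length > 0 ? convertNodeToCursor(edges[edges.length - 1].node) : null,
      hasNextPage: data.length > afterIndex + first
    }
  }
}
// both root query fields can then reuse it, for example:
// books:   { type: PageType(Book),   args: { first: { type: GraphQLInt }, afterCursor: { type: GraphQLString } }, resolve: paginate(books) }
// authors: { type: PageType(Author), args: { first: { type: GraphQLInt }, afterCursor: { type: GraphQLString } }, resolve: paginate(authors) }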
I'm using nock to fake the response from a GraphQL server; however, it seems that my nock setup isn't being respected, and I can't quite understand why.
What follows is a large chunk of code, but when I output the result of my function call, the data isn't what I expect.
In the test 'retrieves each release id individually', nock is called twice, but where I'm expecting output like:
[ { id: 123456,
asset_controller: { id: 54321 },
display_author: 'Jason',
author: [ [Object] ] },
{ id: 78902,
asset_controller: { id: 54321 },
display_author: 'King',
author: [ [Object] ] } ]
I'm instead getting two of the exact same 'Jason' objects back.
In the test 'returns an expected result containing errors when the release is not available', I should be getting an array containing an error, but I am getting a single 'Jason' object back.
I'm wondering whether the nock response I stub in my beforeEach is overriding the nock responses defined in each of my tests. Removing the nock response from the beforeEach results in errors.
const Releases = require('../../../src/services/Releases');
const authorisation = require('../../../src/services/Authorisation');
const ServiceDiscovery = require('service-discovery');
const Lock = require('../../../src/helpers/Lock');
const http = require('http');
describe('Releases', () => {
let serviceDiscovery;
const address = '192.0.0.1';
const port = 4002;
const url = 'http://' + address + ':' + port;
const token = 'abcd';
beforeEach(() => {
sinon.stub(authorisation, 'authorise').resolves(
{
token: token,
expiresAt: 12344657547
}
);
authorisation.token = token;
serviceDiscovery = sinon.createStubInstance(ServiceDiscovery);
serviceDiscovery.discoverServiceDetails.resolves(
[
{
Address: address,
ServicePort: port,
},
]
);
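// default interceptor for every test: it has no request body matcher, so it matches any
// POST to /graphql with these headers, and like any nock interceptor it is consumed once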
nock(url, {
reqheaders: {
'authorization': 'Bearer ' + token,
'content-type': 'application/json',
},
})
.post('/graphql')
.reply(200, {
data: {
ReleaseFormat: [
{
id: 123456,
asset_controller: {
id: 54321,
},
display_author: 'Jason',
author: [
{
id: 123456,
name: 'jason',
},
],
}
]
}
});
});
afterEach(() => {
authorisation.authorise.restore();
});
describe('getReleases', () => {
it('retrieves each release id individually', async () => {
const releaseIDs = [123456, 78902];
const releases = new Releases(authorisation, http, serviceDiscovery, Lock);
serviceDiscovery.discoverServiceDetails.resolves(
[
{
Address: address,
ServicePort: port,
alive: true,
},
]
);
nock(url, {
reqheaders: {
'Authorization': 'Bearer ' + token,
'content-type': 'application/json',
},
})
.post('/graphql', JSON.stringify({
'query': `{
ReleaseFormat(qbe: {
id: 123456
asset_controller: {
id: 57753805
}
})
{
id,
asset_controller {
id,
name
},
display_author,
author {
id,
name,
ISNI
},
}
}`
}))
.reply(200, {
data: {
ReleaseFormat: [
{
id: 123456,
asset_controller: {
id: 54321,
},
display_author: 'Jason',
author: [
{
id: 123456,
name: 'jason',
},
],
}
]
}
})
.post('/graphql', JSON.stringify({
'query': `{
ReleaseFormat(qbe: {
id: 78902
asset_controller: {
id: 57753805
}
})
{
id,
asset_controller {
id,
name
},
display_author,
author {
id,
name,
},
}
}`
}))
.reply(200, {
data: {
ReleaseFormat: [
{
id: 78902,
asset_controller: {
id: 54321,
},
display_author: 'King',
author: [
{
id: 8764567,
name: 'king',
},
],
}
]
}
});
const spy = sinon.spy(releases, '_getReleaseData');
const actual = await releases.getReleases(releaseIDs);
console.log(actual);
expect(spy.called).to.eql(true);
expect(spy.callCount).to.eql(releaseIDs.length);
spy.restore();
});
it('returns an expected result containing errors when the release is not available', async () => {
const releaseIDs = [123456];
const releases = new Releases(authorisation, http, serviceDiscovery, Lock);
serviceDiscovery.discoverServiceDetails.resolves(
[
{
Address: address,
ServicePort: port,
alive: true,
},
]
);
nock(url, {
reqheaders: {
'authorization': 'Bearer ' + token,
'content-type': 'application/json',
},
})
.post('/graphql', JSON.stringify({
'query': `{
ReleaseFormat(qbe: {
id: 123456
asset_controller: {
id: 57753805
}
})
{
id,
asset_controller {
id,
name
},
display_author,
author {
id,
name,
},
}
}`
}))
.reply(200, {
data: {
ReleaseFormat: []
}
});
const expected = [
new Error(`Not a valid release for ID: ${releaseIDs[0]}`)
];
const actual = await releases.getReleases(releaseIDs);
console.log(actual);
expect(actual).to.be.an('array');
expect(actual.length).to.be.eql(expected.length);
expect(actual.filter(e => e instanceof Error).length).to.be.eql(1);
});
});
});