I'm really having a hard time trying to make pagination work with DocumentDB using the Node SDK.
This is what I have:
find: function (querySpec) {
    var self = this;
    return new Promise((resolve, reject) => {
        var options = {
            maxItemCount: 2
        }
        var query = self.client.queryDocuments(self.collection._self, querySpec, options);
        query.executeNext(function (err, results, headers) {
            if (err) {
                reject(err);
            } else {
                self.continuation = headers['x-ms-continuation'];
                resolve({ 'results': results, 'resource': headers['x-ms-resource-usage'] });
            }
        })
    });
},
nextPage: function () {
    var self = this;
    console.log(this.continuation)
    return new Promise((resolve, reject) => {
        var options = {
            maxItemCount: 2
        }
        self.continuation ? options['continuation'] = self.continuation : '';
        var query = self.client.readDocuments(self.collection._self, options);
        query.executeNext(function (err, results, headers) {
            if (err) {
                reject(err);
            } else {
                resolve({ 'results': results, 'resource': headers['x-ms-resource-usage'] });
            }
        })
    });
}
Continuation Token:
{"token":"-RID:K0JYAKIH9QADAAAAAAAAAA==#RT:1#TRC:2","range":{"min":"","max":"FF"}}
Return Error:
{ code: 400,
body: '{"code":"BadRequest","message":"Message: {\\"Errors\\":[\\"The continuation token specified in the request is malformed. Please re-run the query without it and iterate.\\"]}\\r\\nActivityId: 9e04278a-3808-4a59-8e63-dae191ef3519, Request URI: ...."}',
activityId: '9e04278a-3808-4a59-8e63-dae191ef3519' }
So basically, I'm loading the first page with a limit of 2 items. Then, for the next page, I'm sending the continuation token, but I get back the "malformed token" error.
What am I doing wrong?
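For reference, a minimal sketch of how a continuation token is normally replayed: it goes back into the same query (queryDocuments with the original querySpec) via options.continuation, rather than into a different read call. The extra querySpec parameter on nextPage is my assumption, not part of the original code:

// Sketch only: continue the SAME query by passing the previous page's
// token back into queryDocuments via options.continuation.
nextPage: function (querySpec) {
    var self = this;
    return new Promise((resolve, reject) => {
        var options = {
            maxItemCount: 2,
            continuation: self.continuation // value of the 'x-ms-continuation' header
        };
        var query = self.client.queryDocuments(self.collection._self, querySpec, options);
        query.executeNext(function (err, results, headers) {
            if (err) return reject(err);
            self.continuation = headers['x-ms-continuation'];
            resolve({ 'results': results });
        });
    });
}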
I currently have the code below, which was created from a previous question I posted last year here.
var imaps = require('imap-simple');

var configBauerEmail = {
    imap: {
        user: '********#hotmail.com',
        password: '******',
        host: 'imap-mail.outlook.com',
        port: 993,
        tls: true,
        authTimeout: 30000
    }
};

module.exports = {
    'delete any existing emails...': function () {
        imaps.connect(configBauerEmail).then(function (connection) {
            connection.openBox('INBOX').then(function () {
                var searchCriteria = ['ALL'];
                var fetchOptions = { bodies: ['TEXT'], struct: true };
                return connection.search(searchCriteria, fetchOptions);
            })
            // Loop over each message
            .then(function (messages) {
                let taskList = messages.map(function (message) {
                    return new Promise((res, rej) => {
                        var parts = imaps.getParts(message.attributes.struct);
                        parts.map(function (part) {
                            return connection.getPartData(message, part)
                                .then(function (partData) {
                                    // Display e-mail body
                                    if (part.disposition == null && part.encoding != "base64") {
                                        console.log(partData);
                                    }
                                    // Mark message for deletion
                                    connection.addFlags(message.attributes.uid, "\Deleted", (err) => {
                                        if (err) {
                                            console.log('Problem marking message for deletion');
                                            rej(err);
                                        }
                                        res();
                                    });
                                });
                        });
                    });
                });
                return Promise.all(taskList).then(() => {
                    connection.imap.closeBox(true, (err) => {
                        if (err) {
                            console.log(err);
                        }
                    });
                    connection.end();
                });
            });
        });
    },
    'send email to seller and wait for mailbox notification': function (browser) {
        browser.url(browser.launch_url + browser.globals.testDealerBfsAdevertEmailTest);
        browser.notificationDismissal();
        browser.cmpDismissal();
        browser.emailFunctionality.emailTheSeller();
        browser.browserEnd();
    },
    'get new email info': function () {
        const createPromise = ms => new Promise((resolve, reject) => {
            setTimeout(() => resolve(ms), ms);
        });

        function findUnseenEmails(connection) {
            return connection.openBox('INBOX').then(function () {
                var searchCriteria = ['UNSEEN'];
                var fetchOptions = {
                    bodies: ['HEADER', 'TEXT'],
                    markSeen: false
                };
                return connection.search(searchCriteria, fetchOptions).then(function (results) {
                    var subjects = results.map(function (res) {
                        return res.parts.filter(function (part) {
                            return part.which === 'HEADER';
                        })[0].body.subject[0];
                    });
                    console.log(subjects);
                    if (subjects.length > 0) {
                        connection.end();
                        return subjects;
                    } else {
                        return createPromise(60000).then(function () {
                            return findUnseenEmails(connection);
                        });
                    }
                });
            });
        }

        imaps.connect(configBauerEmail).then(function (connection) {
            return findUnseenEmails(connection);
        })
        .then((subjects) => console.log('finished', subjects));
    },
};
This works OK, in that the loop that was added (shown below) runs every 60 seconds, checking whether the email has 'arrived' in the mailbox.
if (subjects.length > 0) {
    connection.end();
    return subjects;
} else {
    return createPromise(60000).then(function () {
        return findUnseenEmails(connection);
    });
}
However, at present, if the email sending process fails and the email account never receives the email, the test keeps looping until it is stopped manually.
What I'd now like to do is set some sort of time limit within this loop, so that if the email has not arrived in the mailbox within 30 minutes, the test fails.
I appreciate that this will involve a limit check somewhere in the loop above, but I've tried placing one in several locations within the loop and I can't get it to work.
Any help would be greatly appreciated. Thanks.
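One possible shape for that limit, as a sketch only (it reuses the existing createPromise and findUnseenEmails names; the extra deadline parameter is something I'm introducing, not part of the original code): thread a deadline through the recursion and give up once it has passed.

function findUnseenEmails(connection, deadline) {
    // 30 minutes from the first call; later recursive calls keep the same deadline
    deadline = deadline || Date.now() + 30 * 60 * 1000;
    return connection.openBox('INBOX').then(function () {
        var searchCriteria = ['UNSEEN'];
        var fetchOptions = { bodies: ['HEADER', 'TEXT'], markSeen: false };
        return connection.search(searchCriteria, fetchOptions).then(function (results) {
            var subjects = results.map(function (res) {
                return res.parts.filter(function (part) {
                    return part.which === 'HEADER';
                })[0].body.subject[0];
            });
            if (subjects.length > 0) {
                connection.end();
                return subjects;
            } else if (Date.now() >= deadline) {
                connection.end();
                throw new Error('No email arrived within 30 minutes');
            } else {
                return createPromise(60000).then(function () {
                    return findUnseenEmails(connection, deadline);
                });
            }
        });
    });
}

The thrown error rejects the promise chain, so the test can be made to fail by returning that chain (or adding a .catch that asserts) instead of letting it loop forever.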
I am using a Lambda custom authorizer in front of an API endpoint.
Whenever a request comes in on that endpoint, API Gateway calls the authorizer to validate the JWT token and, based on the validation result, it generates a policy.
The JWT tokens are generated by the Auth0 API. I know there is a library, https://www.npmjs.com/package/mock-jwks, which mocks Auth0 JWT tokens, but it makes an HTTP request to an endpoint, which is not possible in my case.
The code is pretty self-explanatory; however, I'm stuck on mocking the jsonwebtoken and jwks-rsa libraries. What would be the ideal way to test this kind of function?
My authorizer.js:
require('dotenv').config();
const jwks = require('jwks-rsa');
const jwt = require('jsonwebtoken');

const createPolicyDocument = (effect) => {
    const policy = {
        Version: '2012-10-17',
        Statement: [
            {
                Effect: effect,
                Action: 'execute-api:Invoke',
                Resource: 'xxxxxxxxxxxxxxxxx',
            },
        ],
    };
    return policy;
};

// Extract the Bearer token from the event sent by lambda and return it to the authorizer
const extractToken = (event) => {
    const tokenWithBearer = event.authorizationToken;
    if (!tokenWithBearer) {
        throw new Error(' "event.authorization" paramters is missing');
    }
    const bearer = tokenWithBearer.split(' ');
    if (!bearer) {
        throw new Error('Invalid token');
    }
    return bearer[1];
};

const jwtVerifyOptions = {
    audience: process.env.AUDIENCE,
    issuer: process.env.TOKEN_ISSUER,
};

const client = jwks({
    cache: true,
    rateLimit: true,
    jwksRequestsPerMinute: 10,
    jwksUri: process.env.JWKS_URI,
});

function verifyToken(token) {
    return new Promise((resolve, reject) => {
        const tempDecodedToken = jwt.decode(token, { complete: true });
        console.log(token);
        console.log(tempDecodedToken);
        const { kid } = tempDecodedToken.header;
        client.getSigningKey(kid, (err, key) => {
            if (err) {
                console.log('signinin key get error', err);
                reject('Deny');
            }
            const signingKey = key.publicKey || key.rsaPublicKey;
            console.log(signingKey);
            jwt.verify(token, signingKey, jwtVerifyOptions, (error, decoded) => {
                if (error) {
                    console.log('jwt verify error', error);
                    reject('Deny');
                }
                console.log(decoded);
                resolve({ response: 'Allow', decoded });
            });
        });
    });
}

module.exports.auth = async (event, context, callback) => {
    try {
        const token = extractToken(event);
        const tokenResponse = await verifyToken(token);
        console.log(tokenResponse);
        if (tokenResponse.response === 'Allow') {
            return {
                principalId: tokenResponse.decoded.sub,
                policyDocument: createPolicyDocument('Allow'),
                context: { scope: tokenResponse.decoded.scope },
            };
        }
        return {
            policyDocument: createPolicyDocument('Deny'),
        };
    } catch (err) {
        return {
            policyDocument: createPolicyDocument('Deny'),
        };
    }
};
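One common way to approach this (a sketch only, assuming Jest; the file path, fake key, and claims below are illustrative, not from the real project) is to mock both modules at the module level so verifyToken never touches the network:

// authorizer.test.js - illustrative sketch
jest.mock('jwks-rsa', () =>
    // the module export is the factory; it returns a client whose
    // getSigningKey immediately yields a fake key
    jest.fn(() => ({
        getSigningKey: (kid, cb) => cb(null, { publicKey: 'fake-public-key' }),
    }))
);

jest.mock('jsonwebtoken', () => ({
    decode: jest.fn(() => ({ header: { kid: 'fake-kid' } })),
    verify: jest.fn((token, key, options, cb) =>
        cb(null, { sub: 'auth0|123', scope: 'read:stuff' })
    ),
}));

const { auth } = require('./authorizer');

test('returns an Allow policy for a valid token', async () => {
    const result = await auth({ authorizationToken: 'Bearer aaa.bbb.ccc' });
    expect(result.policyDocument.Statement[0].Effect).toBe('Allow');
    expect(result.principalId).toBe('auth0|123');
});

Switching the verify mock to call cb(new Error('invalid')) lets you assert the Deny path in the same way.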
I have stored 2,775 URLs in my mLab database, and I then fetch each URL to get more information. I store all of the URLs in an array and pass each one into a function for processing. However, the code only runs through about 1,700 URLs, processes them, and then stops. Here is my code (sorry about the code, this is my first time using Stack Overflow):
Product.find({}, (err, foundProducts) => {
    if (err) {
        console.log("err " + err);
    } else {
        foundProducts.forEach(function (foundProduct) {
            var updateProduct = service.updateTikiProduct(foundProduct.url);
        });
    }
});
updateTikiProduct: function (url) {
    const options = {
        url: url,
        json: true
    };
    request(options,
        function (err, res, body) {
            // SOME code to crawl data
            Product.findOneAndUpdate({
                url: options.url
            }, {
                $set: {
                    name: name,
                    brand: brand,
                    store: store,
                    location: location,
                    base_category: categoryType,
                    top_description: topDescription,
                    feature_description: featureDescription
                }
            }, {
                upsert: true,
                new: true
            }, (err, createdProduct) => {
                if (err) {
                    reject(err);
                } else {
                    var currentDate = new Date();
                    if (!createdProduct.hasOwnProperty("price")) {
                        createdProduct.price.push({
                            current: currentPrice,
                            origin: originPrice
                        });
                        createdProduct.save();
                    } else if (createdProduct.hasOwnProperty("price") &&
                        createdProduct.price[0].date.getDate() != currentDate.getDate()) {
                        createdProduct.price.push({
                            current: currentPrice,
                            origin: originPrice
                        });
                        createdProduct.save();
                        console.log("Update price");
                    }
                    counter++;
                    console.log("url : " + options.url);
                    console.log("Created product " + counter + " success!");
                }
            });
        }
}
I guess Mongo has a limit on how many items it returns from the DB; you should try findAll or https://stackoverflow.com/a/3705615/4187058.
I think your code is not processing all the elements because you are processing them all in parallel, which will stop at some point once memory fills up.
foundProducts.forEach(function (foundProduct) {
    var updateProduct = service.updateTikiProduct(foundProduct.url);
});
What you should do is process them in series. You can use async/await for that; make the following changes and it will work:
for (let foundProduct of foundProducts) {
    var updateProduct = await service.updateTikiProduct(foundProduct.url);
}
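For that await to have any effect, the enclosing callback has to be async and updateTikiProduct has to return a Promise; a minimal sketch under those assumptions:

// Sketch: process the products one at a time instead of firing all
// 2,775 requests at once. Assumes updateTikiProduct is changed to
// return a Promise that resolves when its findOneAndUpdate/save is done.
Product.find({}, async (err, foundProducts) => {
    if (err) {
        console.log("err " + err);
        return;
    }
    for (const foundProduct of foundProducts) {
        await service.updateTikiProduct(foundProduct.url); // waits before starting the next request
    }
    console.log("Processed " + foundProducts.length + " products");
});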
I've been banging my head against a wall for some time now. I have some Node code that pulls a bunch of Google Calendar events and then maps them onto a different object structure for use in my web app. As part of this, I need to match the users in each Google Calendar event with the users in my web app's database. Here's the code:
router.post('/getCalendar', function (req, res) {
    // authentication
    var oauth2Client = getOAuthClient();
    var session = req.session;
    oauth2Client.setCredentials({
        access_token: req.user.google.token,
        refresh_token: req.user.google.refreshToken,
        expiry_date: true
    })
    var p = new Promise(function (resolve, reject) {
        // get the next 20 calendar events
        var calendar = google.calendar('v3');
        calendar.events.list({
            auth: oauth2Client,
            calendarId: 'primary',
            timeMin: (new Date()).toISOString(),
            maxResults: 20,
            singleEvents: true,
            orderBy: 'startTime'
        }, function (err, response) {
            if (err) console.log('err', err)
            // displays all the events
            if (response) console.log('response', response)
            if (response) {
                var events = response.items;
                resolve(events || err);
            }
        });
    }).then(function (data) {
        var newdata = data.map(function (item) {
            // create the new object structure and populate with some details
            newObj = {
                _id: item.id,
                _title: item.summary,
                group: [req.headers.group],
                fields: [],
                assignedTo: [],
                google: "calendar",
                googledata: item,
                _category: []
            }
            var extendedFields = {}
            // get the list of attendees
            attendees = item.attendees.map(function (user) {
                var attendee = user.email;
                return attendee;
            })
            // pulls the user information for each attendee using promises
            var promises = attendees.map(function (email) {
                return new Promise(function (resolve, reject) {
                    // data is in a mongodb, using mongoose
                    User.findOne({ 'google.email': email }).exec(function (err, user) {
                        if (err) return resolve(null);
                        resolve(user); //not using reject at all so the Promise.all shouldn't fail
                    });
                })
            })
            Promise.all(promises)
                .then(function (value) {
                    value.forEach(function (user) {
                        if (promise !== null) {
                            newObj.assignedTo.push(user);
                        }
                    });
                    return newObj
                }).catch((error) => { console.log(error) })
        })
        res.json(newdata)
    })
});
I am using Promise.all to wait until all the promises have resolved, but it seems to move on to the next item in the data array before completing the first. Any guidance would be welcome. Please let me know if more details are required.
Thank you.
Return the promise of the last Promise.all:
return Promise.all(promises)
    .then(function (value) {
        value.forEach(function (user) {
            // user !== null ?
            if (promise !== null) {
                newObj.assignedTo.push(user);
            }
        });
        return newObj
    })
    .catch((error) => { console.log(error) })
Then use Promise.all on newdata:
Promise.all(newdata)
    .then(function (newObjs) {
        // newObjs is the Array of newObj in your code
        res.json(newObjs);
    })
    .catch(function (err) {
        console.log(err);
    });
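Putting those two pieces together (a condensed sketch, with the newObj fields abbreviated): the map callback returns the inner Promise.all, so newdata becomes an array of promises, and res.json runs only after every attendee lookup has settled.

var newdata = data.map(function (item) {
    var newObj = { _id: item.id, _title: item.summary, assignedTo: [] /* ...other fields... */ };
    var attendees = (item.attendees || []).map(function (user) { return user.email; });
    var promises = attendees.map(function (email) {
        return new Promise(function (resolve) {
            User.findOne({ 'google.email': email }).exec(function (err, user) {
                resolve(err ? null : user);
            });
        });
    });
    return Promise.all(promises).then(function (users) {
        users.forEach(function (user) {
            if (user !== null) {
                newObj.assignedTo.push(user);
            }
        });
        return newObj;
    });
});

Promise.all(newdata)
    .then(function (newObjs) { res.json(newObjs); })
    .catch(function (err) { console.log(err); });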
I'm new to Promises so I might be doing something stupid here, but I can't seem to figure it out.
Just so I know I'm on the right path, a bit of information upfront. I have an authenticate method which returns a promise:
APIWrapper.prototype.authenticate = function () {
    var self = this;
    return new Promise(function (resolve, reject) {
        request({
            uri: self.httpUri + '/auth/authenticate',
            method: 'GET',
            headers: {
                auth_user: self.user,
                auth_pass: self.pass,
                auth_appkey: self.appkey
            }
        }, function (err, res, body) {
            if (err) return reject(err);
            self.parser.parseXML(body, function (err, result) {
                if (err) return reject(err);
                if (result.error) { return reject(result.error) }
                self.token = result.auth.token[0];
                return resolve(result);
            });
        });
    });
};
I chain this with .getDashboards() like this:
wrapper.authenticate().then(function () {
    wrapper.getDashboards();
}).then(function (result) {
    console.log('result', result);
});
.getDashboards() also returns a promise:
APIWrapper.prototype.getDashboards = function () {
    var self = this;
    return new Promise(function (resolve, reject) {
        request({
            url: self.httpUri + '/user/dashboard',
            method: 'GET',
            headers: {
                auth_appkey: self.appkey,
                auth_token: self.token
            }
        }, function (err, res, body) {
            if (err) { return reject('Could not connect to the API endpoint'); };
            self.parser.parseXML(body, function (err, data) {
                var dashboards = [];
                if (err) { return reject(err); }
                if (data.error) { return reject(data.error); }
                for (var i = 0; i < data.Dashboards.Dashboard.length; i++) {
                    dashboards.push(self.getDashboard(data.Dashboards.Dashboard[i]));
                }
                // returns early here
                resolve(dashboards);
            });
        });
    });
};
With the .getDashboard() method like this at the moment:
APIWrapper.prototype.getDashboard = function (db) {
    var dashboard = {};
    dashboard.title = db.Title[0];
    dashboard.id = db.$.id;
    console.log(dashboard);
    return dashboard;
};
What happens with this code is that it logs the result (undefined) before it has built the dashboards. I suspect the resolve() in .getDashboards() doesn't wait for the for loop to finish? Do I need to use promises in the .getDashboard() method as well, or how else would I wait for it to finish before resolving my .getDashboards() promise?
Output:
> result undefined
{ title: 'Dashboard 1', id: '3271' }
{ title: 'Dashboard 2', id: '3272' }
{ title: 'Dashboard 3', id: '3273' }
I'm using this Promise implementation at the moment: https://github.com/then/promise
You need to return the promise to have it chained:
wrapper.authenticate().then(function () {
    return wrapper.getDashboards();
}).then(function (result) {
    console.log('result', result);
});
In your case, it can be simplified as:
wrapper.authenticate()
    .then(wrapper.getDashboards)
    .then(function (result) {
        console.log('result', result);
    });
You also don't seem to handle errors. The then library seems very raw on this point, so you should probably add a second argument:
wrapper.authenticate()
    .then(wrapper.getDashboards, onAuthenticateError)
    .then(function (result) {
        console.log('result', result);
    }, onDashboardError);