ltijs: Deep Linking hanging when sending back the data to the LMS - javascript

I'm losing sleep trying to make ltijs work with Deep Linking.
I've built a simple client that registers a tool against an LMS platform. I'm able to set up the tool on the platform (Moodle and Canvas) and perform launches.
The issue I have is when trying to add "Deep Linking" to my tool. Following the directions in the ltijs docs (https://cvmcosta.me/ltijs/#/deeplinking) I've set up an "lti.onDeepLinking()" handler that redirects to an HTML page where some content can be selected.
I then post this form to my own tool's Express server, where its data gets encoded and an auto-submit form is generated and sent back to the client (and then auto-submitted to the LMS).
This is where things "hang". I clearly see the POST request being sent to the deep_link_redirect_url requested by the original deep link request message, but the LMS just "hangs" there until it eventually times out (I guess) and shows a generic error page.
I'm pretty sure I'm missing some vital piece of the puzzle here, but I have no clue what it could be.
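For context, as far as I understand the LTI 1.3 Deep Linking spec, the response that goes back to the platform is just an auto-submitted form carrying a single JWT field, roughly like the sketch below (the function name, form id, return URL and token are placeholders, not my actual code; ltijs builds and signs the real form for me via createDeepLinkingForm):

// Sketch only: what the self-submitting deep linking response form boils down to
const buildDeepLinkingResponseForm = (returnUrl, signedJwt) => `
  <form id="dl_response" action="${returnUrl}" method="POST">
    <input type="hidden" name="JWT" value="${signedJwt}" />
  </form>
  <script>document.getElementById("dl_response").submit()</script>
`;

My full tool code follows.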
const path = require("path");
const lti = require("ltijs").Provider;

const ltiKey = "myverylongandspecialltikeyfortesting";
const toolUrl = "http://192.168.1.25:3010";

// setup provider
lti.setup(ltiKey, {
  url: "mongodb://127.0.0.1:3001/lticlient",
}, {
  // options
  appRoute: "/lti/launch",
  loginRoute: "/lti/login",
  cookies: {
    secure: false, // set secure to true if the testing platform is in a different domain and https is being used
    sameSite: "None" // set it to "None" if the testing platform is in a different domain and https is being used
  },
  devMode: true, // set it to false when in production and using https
  keysetRoute: "/lti/keys",
});

// set the lti launch callback
lti.onConnect((token, req, res) => {
  console.log("IDTOKEN", token);
  return res.send("LTI TOOL LAUNCHED!");
});

lti.onDeepLinking((token, req, res) => {
  console.log("DEEP LINKING", token);
  // Call redirect function to deep linking view
  lti.redirect(res, '/deeplink')
})

// GET request to show the selection page
lti.app.get("/deeplink", async (req, res) => {
  res.sendFile(path.join(__dirname, '/public/select.html'))
});

// POST submit from selection page with selected content item
lti.app.post("/deeplink", async (req, res) => {
  const resource = req.body
  const items = [
    {
      type: 'ltiResourceLink',
      title: resource.product,
      url: `${toolUrl}/lti/launch`,
      custom: {
        product: resource.product
      }
    }
  ]
  const form = await lti.DeepLinking.createDeepLinkingForm(res.locals.token, items, { message: 'Successfully registered resource!' })
  console.log("RETURNING SELF-SUBMITTING FORM", form);
  return res.send(form);
})

const getPlatforms = () => {
  return [
    {
      url: "http://192.168.1.239",
      name: "MoodleClient1",
      clientId: "client-id-provided-by-Moodle",
      authenticationEndpoint: "http://192.168.1.239/mod/lti/auth.php",
      accesstokenEndpoint: "http://192.168.1.239/mod/lti/token.php",
      authConfig: { method: 'JWK_SET', key: "http://192.168.1.239/mod/lti/certs.php" }
    }
  ];
}

const registerPlatforms = async () => {
  const platforms = getPlatforms();
  platforms.forEach(async (cfg) => {
    console.log(`Registering platform ${cfg.name}`);
    await lti.deletePlatform(cfg.url, cfg.clientId);
    await lti.registerPlatform(cfg);
    const platform = await lti.getPlatform(cfg.url, cfg.clientId);
    await platform.platformActive(true)
  });
}

const setup = async () => {
  await lti.deploy({ port: 3010 });
  registerPlatforms();
  console.log("platforms registered and active");
}

setup();

Related

NodeJS Requested data leaks across HTTP requests

I have a simple webserver created in Express.js. The server serves files that are dynamically created by processing data from a third-party API.
Here is my webserver code. It asks builder.js to build the file, which requests, receives, processes, and returns data from a third-party API by awaiting the response of a promisified OAuth request as needed. The builder will call the API at least twice to create a complete file ready for serving.
const express = require('express');
const app = express();
const builder = require('./builder.js');

let requestID = 0;

app.get('/', function (req, res) {
  requestID++;
  console.log(`tab: ${requestID}<`);
  res.set('Content-Type', 'text/plain; charset=UTF-8')
  res.status(200).send('Hello World');
  console.log(`tab: ${requestID}>`);
});

app.get('/file', async function (req, res) {
  requestID++;
  console.log(`tab: ${requestID}<`);
  if (req.query["id"] != undefined) {
    let URLparams = new URLSearchParams(req.query);
    console.log(`tab: ${requestID}, requested id: ${URLparams.get("id")}`);
    let output = await builder.buildFile(URLparams);
    try {
      console.log(`tab: ${requestID}, requested id: ${URLparams.get("q")}, served ${getIDfromOutput(output)}`);
      res.set('Content-Type', 'application/rss+xml; charset=UTF-8')
      res.status(200).send(output);
    } catch(e) {
      console.log(`tab: ${requestID}, ${e}`);
      if (e instanceof String) { res.send(JSON.stringify(JSON.parse(e), null, 3)); }
      else { res.send(JSON.stringify(e, null, 3)); }
    };
  } else {
    res.set('Content-Type', 'text/plain; charset=UTF-8')
    res.status(404)
      .send("404: Page not found.");
  }
  console.log(`tab: ${requestID}>`);
});

app.listen(3000, "localhost");
The code works as intended when making requests to /file one at a time.
//1 tab loaded
tab: 1<
tab: 1, requested: 1331444331778101248
tab: 1, requested: 1331444331778101248, loaded 1331444331778101248
tab: 1>
However, when the endpoint receives multiple unique requests at the same time (opening multiple tabs at once or running parallel wget commands), the server responds correctly in some cases, but mostly it responds with the same file it served previously.
// 5 unique tabs loaded at the same time: 1551641441679597569, 1448115610173558787, 1370689539505860613, 1328121208022446086, 1509637745140019212
tab: 1<
tab: 1, requested: 1551641441679597569
tab: 2<
tab: 2, requested: 1448115610173558787
tab: 2, requested: 1551641441679597569, loaded 1551641441679597569
tab: 2>
tab: 3<
tab: 3, requested: 1370689539505860613
tab: 3, requested: 1448115610173558787, loaded 1448115610173558787
tab: 3>
tab: 3, requested: 1370689539505860613, loaded 1370689539505860613
tab: 3>
The result of these simultaneous requests is that tabs 1-4 load fine, but tab 5 shows the output of tab 4. The console log doesn't quite capture the issue either, but it's definitely different from the normal, one-off request.
I do not want this to happen, as I fear it may happen in production, and I do not want output to leak across requests. However, I have no idea what is causing this or how to investigate and fix it. The code works fine when builder.buildFile() has to make one API call to the third party, but I am always making 2 or more calls.
As requested, here is a stripped-down version of the buildFile function. When serving directly from the API through builder.getData(url), the response for each request is unique and responses do not cross over to other requests. It happens only when going through builder.buildFile(). I feel like the issue is with the way the promises are handled, but I am not sure.
const OAuth = require('oauth');
const { promisify } = require('util');
require('dotenv').config();

let oauth = getOAuth();

module.exports = {
  buildFile: async function (urlParam) { // URLSearchParams(req.query)
    let id = urlParam.get("id");
    try {
      let metadata = await getData(`http://API/id=${id}?type=metadata`);
      let XMLFile = await fileBuilder(metadata);
      return XMLFile;
    } catch (e) {
      console.log("Err: ");
      console.log(e);
      return Promise.reject(e);
    }
  },
  getData: async function (url) {
    return await getData(url);
  }
}

async function fileBuilder(metadata) {
  let response = await getData(`http://API/id=${id}?type=fulldata`);
  response = extendData(response); // calls await getData() once more to fill in any gaps in the initial response
  let xml = ``;
  /* Build XML file as a string, appending as the function processes the data */
  return xml;
}

function getOAuth() {
  return new OAuth.OAuth(
    'https://API/request_token',
    'https://API/access_token',
    process.env.api_key,
    process.env.api_secret,
    '1.0A', null, 'HMAC-SHA1'
  );
}

async function getData(url) {
  const get = promisify(oauth.get.bind(oauth));
  let body;
  try {
    body = await get(
      url,
      process.env.access_key,
      process.env.access_secret
    );
  } catch (e) {
    console.log("getData failed: \n" + JSON.stringify(e));
    return Promise.reject(e);
  }
  return JSON.parse(body);
}
You see the mix-up in the console because you are using the shared requestID after it has already been changed by another request. To avoid this, capture its value at the beginning of the handler.
The problem you have with the wrong file being served might come from the buildFile function, since I can't locate the cause in this code fragment.
app.get('/file', async (req, res) => {
  const localReqId = requestID++;
  console.log(`tab: ${localReqId}<`);
  if (req.query.id !== undefined) {
    const URLparams = new URLSearchParams(req.query);
    console.log(`tab: ${localReqId}, requested id: ${URLparams.get('id')}`);
    const output = await builder.buildFile(URLparams);
    try {
      console.log(`tab: ${localReqId}, requested id: ${URLparams.get('q')}, served ${getIDfromOutput(output)}`);
      res.set('Content-Type', 'application/rss+xml; charset=UTF-8');
      res.status(200).send(output);
    } catch (e) {
      console.log(`tab: ${localReqId}, ${e}`);
      if (e instanceof String) {
        res.send(JSON.stringify(JSON.parse(e), null, 3));
      } else {
        res.send(JSON.stringify(e, null, 3));
      }
    }
  } else {
    res.set('Content-Type', 'text/plain; charset=UTF-8');
    res.status(404)
      .send('404: Page not found.');
  }
  console.log(`tab: ${localReqId}>`);
});

How to wait for a variable to be populated by an api request before passing it to a webpage as an argument?

I'm new to JavaScript and cannot seem to make this work. The topic of the quiz depends on user input: when the user presses next, I get the topic (this also takes the user to the main quiz page), then I have to fetch data from the API with the topic as a parameter, process the result of the fetch operation, and pass that info to the main quiz page. But the variable that is supposed to be populated by the fetch request is still undefined when I pass it to the main quiz page.
var Allquestions;
var sheetdb = require('sheetdb-node');

// create a config file
var config = {
  address: 'https://sheetdb.io/api/v1/9djmf8ydc7hwy',
};

// sheetdb
// Create new client
var client = sheetdb(config);

function downloadquestions(topic) {
  console.log(topic);
  client.read({ limit: 2, sheet: topic }).then(function(data) {
    console.log(data + " in client.read func")
    processQuestions(data);
  }, function(err){
    console.log(err);
  });
}

async function processQuestions(data) {
  console.log(data + "data in process");
  Allquestions = JSON.parse(data);
  console.log(Allquestions[0].Question + " This is defined");
}

app.get("/", (req, res) => {
  res.render("pages/index", { title: "Home"});
});

// app.post("/" , urlencodedParser ,(req , res) => {
//   console.log(req.body.topic);
// })

app.get("/questions", urlencodedParser , (req , res) => {
  downloadquestions(req.body.topic);
  console.log(Allquestions + " this is undefined");
  res.render("/pages/quizpage" , {Allquestions})
})
There are a few issues with your code. You have a broken promise chain: client.read() returns a promise, and that promise is going nowhere. You either return it or await it; to be able to await, you will also need to mark your route handler (req, res) as async.
Your code is also a little mixed up. You have Allquestions as a global variable, which isn't great for multiple users, as the last requested topic will overwrite it each time.
Also, try to avoid swallowing exceptions in utility functions; keep your exception handling at the top level, e.g. in your case inside your req/res handler.
So with all this in mind, your refactored code could look something like this:
const sheetdb = require('sheetdb-node');

// create a config file
const config = {
  address: 'https://sheetdb.io/api/v1/9djmf8ydc7hwy',
};

// sheetdb
// Create new client
const client = sheetdb(config);

async function downloadquestions(topic) {
  const data = await client.read({ limit: 2, sheet: topic });
  return processQuestions(data);
}

function processQuestions(data) {
  return JSON.parse(data);
}

app.get("/", (req, res) => {
  res.render("pages/index", { title: "Home" });
});

app.get("/questions", urlencodedParser, async (req, res) => {
  try {
    const allQuestions = await downloadquestions(req.body.topic);
    res.render("/pages/quizpage", { Allquestions: allQuestions });
  } catch (e) {
    console.error(e);
    res.end('There was an error');
  }
})

Cypress: remove all cookies with intercepted route

I'm intercepting my login and logout routes in my functional tests with Cypress. (I have to stub them because the Magic technology I'm using for authentication does NOT support a test mode for the server side SDK, yet.)
Here is the code for the routes:
import {
  loginRoute,
  logoutRoute,
} from 'features/user-authentication/user-authentication-api';

// ...

cy.intercept(loginRoute, request => {
  request.reply({
    headers: {
      'Set-Cookie': `magic-auth-token=${Cypress.env(
        'validMagicAuthToken',
      )}`,
    },
    statusCode: 200,
    body: { success: true },
  });
});

cy.intercept(logoutRoute, request => {
  request.reply({
    headers: {
      'Set-Cookie': `magic-auth-token=; Max-Age=-1; Path=/`,
    },
    statusCode: 302,
  });
});
I'm mimicking the original routes' behavior, where they add and remove cookies. The login route's stub works perfectly. However, the stub for the logout route does not.
The original logout route looks like this:
import { parse, serialize } from 'cookie';

// ...

function removeTokenCookie<T>(response: NextApiResponse<T>) {
  const cookie = serialize(TOKEN_NAME, '', {
    maxAge: -1,
    path: '/',
  });
  response.setHeader('Set-Cookie', cookie);
}

const logoutHandler: NextApiHandler = async (request, response) => {
  const session = await getSession(request);
  if (session) {
    await magic.users.logoutByIssuer(session.issuer);
  }
  removeTokenCookie(response);
  response.writeHead(302, { Location: '/' });
  response.end();
};
How can I remove the cookies using the logout route's stub? For some reason the cookie does NOT get removed when I set the headers as I did above.
Cypress has the clearCookie command, but it can't be used inside the intercept callback.
cy.intercept(logoutRoute, request => {
  cy.clearCookie('magic-auth-token')
  request.reply...
})
This is the error
CypressError
Cypress detected that you returned a promise from a command while also invoking one or more cy commands in that promise.
The cy command you invoked inside the promise was: cy.clearCookie()
Looking at the source code for clearCookie, it boils down to the internal command
Cypress.automation('clear:cookie', { name: <cookie-name> })
While it's an internal command, its use has been demonstrated in Cypress Automation and in Testing an Application in Offline Network Mode.
The type definitions were added recently Add type for Cypress.automation #7573
Here's a proof of concept,
it('clears cookies in intercept', () => {
  cy.setCookie('magic-auth-token', '1234')
  cy.getCookies().should('have.length', 1)

  cy.intercept('*', (req) => {
    Cypress.automation('clear:cookie', { name: 'magic-auth-token' })
  })

  cy.visit('http://example.com').then(() => {
    // after the request has been intercepted
    cy.getCookies().should('have.length', 0)
  })
})

Login system works properly in Postman but not in browser

Here is my router
router.post("/login", async (req, res) =>
{
  try
  {
    const user = await User.findByCredentials(req.body.email, req.body.password)
    // console.log(user)
    const token = await user.generateAuthToken()
    // console.log(token)
    res.redirect("/takvim")
  }
  catch(e)
  {
    res.status(400).redirect("/")
  }
})
Here is my user model that I use in the function above
UserSchema.methods.generateAuthToken = async function ()
{
  const user = this
  const token = jwt.sign({_id: user._id.toString()}, "secret")
  user.tokens = user.tokens.concat({token})
  await user.save()
  return token
}

UserSchema.statics.findByCredentials = async function (emails, passwords)
{
  const user = await User.findOne({email: emails})
  console.log(user)
  const isMatch = await bcrypt.compare(passwords, user.password)
  if(!isMatch)
  {
    throw new Error("unable to login")
  }
  return user
}
I am making the request from the frontend using a button:
$uyeolForm.addEventListener("submit", () =>
{
  if(!$uyeolFormEmail.value.includes(".com"))
  {
    return $uyeolFormHata.innerHTML = "email geçersiz"
  }

  const xhr = new XMLHttpRequest();
  let form = JSON.stringify({
    email: $uyeolFormEmail.value,
    password: $uyeolFormPassword.value
  });

  xhr.open("POST", "/login")
  xhr.setRequestHeader('Content-type', 'application/json')
  xhr.send(form);
})
The problem is that when I use Postman, the application redirects me to the page I want and doesn't give an error.
When I send the request with the button, it still finds the user but it doesn't redirect me to the page I expect, and in the console I see the user (expected) and then null, which is not expected.
Thanks to everyone.
You are making an HTTP request with XMLHttpRequest when a submit event is triggered, but you aren't preventing the default behaviour of the form submission.
So the XMLHttpRequest object is created and makes a request, and then immediately after (possibly cancelling that request, depending on how quickly things go) the <form> is submitted to the URL specified in its action.
You said the endpoint was being hit twice, once where you get the user you expect and once where you don't.
When you get the user you expect, it is from the XHR submission.
When you don't, that is from the regular form submission (which won't be JSON encoded, as HTML forms don't support JSON encoding, so it doesn't find the user because it doesn't decode the data in the form correctly).
Since you said you wanted to redirect, don't use Ajax. Ajax is a method for making an HTTP request without leaving the current page.
Change the server-side code to accept the data in the format the <form> is encoding it in (probably application/x-www-form-urlencoded, unless you changed it with the enctype attribute).
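As a minimal sketch of that approach (assuming Express 4.16+, the /login route from the question, and form inputs named email and password), the server would parse the regular form submission like this:

// Parse application/x-www-form-urlencoded bodies, which is how a plain
// <form method="post"> encodes its fields (built into Express 4.16+)
app.use(express.urlencoded({ extended: true }));

router.post("/login", async (req, res) => {
  try {
    // req.body.email and req.body.password now come straight from the form fields
    const user = await User.findByCredentials(req.body.email, req.body.password);
    await user.generateAuthToken();
    res.redirect("/takvim"); // the browser follows this redirect on a normal form post
  } catch (e) {
    res.status(400).redirect("/");
  }
});

If you go this route, drop the XMLHttpRequest code, or keep the submit listener purely for validation and prevent the submission only when validation fails.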
You always want to know what the error message is. Add a console.error(JSON.stringify(e)) before the response, and tell us what it says:
catch(e)
{
  console.error(JSON.stringify(e));
  res.status(400).redirect("/");
}
If you're going to use both application/json and application/x-www-form-urlencoded, to support both Ajax and the usual form submission, you have to handle the redirect at the frontend level by reading the Location header:
$uyeolForm.addEventListener("submit", (event) => {
  event.preventDefault();

  if(!$uyeolFormEmail.value.includes(".com")) {
    return $uyeolFormHata.innerHTML = "email geçersiz"
  }

  fetch('/login', {
    method: "POST",
    credentials: "include",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      email: $uyeolFormEmail.value,
      password: $uyeolFormPassword.value
    })
  })
  .then(function(response) {
    if (response.redirected) {
      const redirectTo = response.headers.get('Location');
      if (redirectTo) {
        window.location.href = redirectTo;
        return;
      }
    }
  })
  .catch(function(error) {
    alert(error.message);
  });
})
Keep in mind that to support both application/json and application/x-www-form-urlencoded you have to attach two body parsers as middleware:
const bodyParser = require('body-parser');
router.use(bodyParser.urlencoded(true));
router.use(bodyParser.json());

How do I listen for new uploads from a specific channel in the YouTube API?

I am making a Discord bot, and I want it to be able to use the YouTube API to fetch new uploads from a specific channel.
I have searched elsewhere, but they all say how to upload videos, not how to track uploads.
Is this possible, and how can I do it?
Edit: I tried PubSubHubbub, but it was very confusing and I couldn't get it to work.
Here is an example built on top of Node.js (v12) and Fastify, published with ngrok. I wrote some comments explaining what is happening:
const fastify = require('fastify')({ logger: true })
const xmlParser = require('fast-xml-parser')
const { URLSearchParams } = require('url')
const fetch = require('node-fetch')

// add an xml parser
fastify.addContentTypeParser('application/atom+xml', { parseAs: 'string' }, function (req, xmlString, done) {
  try {
    const body = xmlParser.parse(xmlString, {
      attributeNamePrefix: '',
      ignoreAttributes: false
    })
    done(null, body)
  } catch (error) {
    done(error)
  }
})

// this endpoint is needed for the hub's subscription verification challenge
fastify.get('/', (request, reply) => {
  reply.send(request.query['hub.challenge'])
})

// this endpoint will get the updates
fastify.post('/', (request, reply) => {
  console.log(JSON.stringify(request.body, null, 2))
  reply.code(204)
  reply.send('ok')
})

fastify.listen(8080)
  .then(() => {
    // after the server has started, subscribe to the hub
    // Parameter list: https://pubsubhubbub.github.io/PubSubHubbub/pubsubhubbub-core-0.4.html#rfc.section.5.1
    const params = new URLSearchParams()
    params.append('hub.callback', 'https://1f3dd0c63e78.ngrok.io') // you must have a public endpoint. get it with "ngrok http 8080"
    params.append('hub.mode', 'subscribe')
    params.append('hub.topic', 'https://www.youtube.com/xml/feeds/videos.xml?channel_id=UCfWbGF64qBSVM2Wq9fwrfrg')
    params.append('hub.lease_seconds', '')
    params.append('hub.secret', '')
    params.append('hub.verify', 'sync')
    params.append('hub.verify_token', '')
    return fetch('https://pubsubhubbub.appspot.com/subscribe', {
      headers: { 'content-type': 'application/x-www-form-urlencoded' },
      body: params,
      method: 'POST'
    })
  })
  .then((res) => {
    console.log(`The status must be 204. Received ${res.status}`)
    // shows the error if something went wrong
    if (res.status !== 204) {
      return res.text().then(txt => console.log(txt))
    }
  })
I used my channel id to do some testing. Note that the notification is not real-time; the POSTs are usually triggered after several minutes.
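One thing the snippet above doesn't handle (so treat this as an assumption about your setup rather than part of the answer): the hub's subscriptions expire after the lease period, so you would typically re-send the same subscribe request periodically. A rough sketch, assuming a subscribe() wrapper around the fetch call above (same node-fetch import):

// Hypothetical wrapper around the subscribe POST from the example above
async function subscribe () {
  const params = new URLSearchParams()
  params.append('hub.callback', 'https://1f3dd0c63e78.ngrok.io')
  params.append('hub.mode', 'subscribe')
  params.append('hub.topic', 'https://www.youtube.com/xml/feeds/videos.xml?channel_id=UCfWbGF64qBSVM2Wq9fwrfrg')
  params.append('hub.verify', 'sync')
  const res = await fetch('https://pubsubhubbub.appspot.com/subscribe', {
    headers: { 'content-type': 'application/x-www-form-urlencoded' },
    body: params,
    method: 'POST'
  })
  return res.status
}

// re-subscribe well before the lease runs out, e.g. once a day
setInterval(() => {
  subscribe().catch((err) => console.error('re-subscribe failed', err))
}, 24 * 60 * 60 * 1000)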
