I'm using Cloudflare Workers, and on every request I want to:

1. request only the HTML (so it counts as only 1 request)
2. search the response for a string
3. purge that page's cache if the string is found

I've solved points #2 and #3, but I can't figure out whether #1 is feasible or possible at all.
I need it to be a single request because there is a daily limit on the number of free requests; otherwise I get about 50-60 requests per page.
My current attempt at #1, which doesn't work right:
async function handleRequest(request) {
  const init = {
    headers: {
      'content-type': 'text/html;charset=UTF-8',
    },
  };
  const response = await fetch(request);
  await fetch(request.url, init).then(function(response) {
    response.text().then(function(text) {
      console.log(text);
    })
  }).catch(function(err) {
    // There was an error
    console.warn('Something went wrong.', err);
  });
  return response;
}

addEventListener('fetch', event => {
  return event.respondWith(handleRequest(event.request))
});
You can't request "only the HTML": the worker will act on any request that matches the route it is deployed at. If you only care about the HTML, you will need to set up your worker's route so it only matches the endpoints you want the worker to run on.
Alternatively, you can run the worker on every request and only apply your logic when the response Content-Type is one you care about. That would be something along these lines:
addEventListener('fetch', event => {
  event.respondWith(handleRequest(event.request));
})

async function handleRequest(request) {
  let response = await fetch(request);
  let type = response.headers.get("Content-Type") || "";
  if (type.startsWith("text/")) {
    // this is where your custom logic goes
  }
  return response;
}
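To tie this back to the original goal, here is a minimal sketch of what that custom logic might look like: clone the response so the body can be read while the original is still returned, search it for the string, and purge that URL through Cloudflare's purge-by-URL API. MARKER, ZONE_ID, and API_TOKEN are placeholders you would supply yourself, not part of the answer above:

async function purgeIfMarked(request, response) {
  // clone first: a response body can only be read once
  const text = await response.clone().text();
  if (!text.includes(MARKER)) return; // MARKER is a placeholder string
  // Cloudflare's purge-by-URL endpoint; ZONE_ID and API_TOKEN are assumed bindings
  await fetch(`https://api.cloudflare.com/client/v4/zones/${ZONE_ID}/purge_cache`, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${API_TOKEN}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ files: [request.url] }),
  });
}

You would await purgeIfMarked(request, response) inside the if block above. Note that this keeps the page itself to a single fetch, which was the constraint in point #1; the purge call goes to the Cloudflare API, not to your site.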
Need some guidance here with service workers.
When the service worker is installed, it caches the assets. On the next reload, any request is intercepted by the service worker, which first checks the cache; if the asset isn't found there, we make a network call. But this second network call is again intercepted by the service worker, so it turns into an infinite loop.
I don't want the next fetch call to be intercepted again. I hope I've been able to explain the issue.
Here is the serviceWorker.js:
const cacheVersion = "v11";

self.addEventListener('install', (event) => {
  self.skipWaiting();
  event.waitUntil(caches.open(cacheVersion).then((cache) => {
    cache.addAll([
      '/',
      '/index.html',
      '/style.css',
      '/images/github.png',
    ])
    .then(() => console.log('cached'), (err) => console.log(err));
  }))
})

self.addEventListener('activate', event => {
  event.waitUntil(
    (async () => {
      const keys = await caches.keys();
      return keys.map(async (cache) => {
        if (cache !== cacheVersion) {
          console.log("service worker: Removing old cache: " + cache);
          return await caches.delete(cache);
        }
      })
    })()
  )
})

const cacheFirst = async (request) => {
  try {
    const responseFromCache = await caches.match(request);
    if (responseFromCache) {
      return responseFromCache;
    }
  }
  catch (err) {
    return fetch(request);
  }
  return fetch(request);
};

self.addEventListener("fetch", (event) => {
  event.respondWith(cacheFirst(event.request));
});
The reason is your cacheFirst; it's a bit wrong. What do we want it to do (the high-level algorithm)? It should be something like this, right?

1. check the cache and, if a match is found, return it
2. otherwise, fetch from the server, cache the response, and return it
3. if the network failed, return some "dummy" response
const cacheFirst = async (request) => {
  // First try to get the resource from the cache
  const responseFromCache = await caches.match(request);
  if (responseFromCache) {
    return responseFromCache;
  }
  // Next try to get the resource from the network
  try {
    const responseFromNetwork = await fetch(request);
    // a response body may be consumed only once, so we save a clone:
    // one copy goes into the cache, the other is served
    putInCache(request, responseFromNetwork.clone());
    return responseFromNetwork;
  } catch (error) {
    // the network failed, but we need to return something, right?
    return new Response('Network error happened', {
      status: 408,
      headers: { 'Content-Type': 'text/plain' },
    });
  }
};
This is not a ready-to-use solution! Think of it as pseudo-code; for instance, you might need to implement putInCache first.
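A minimal putInCache could look like the sketch below, reusing the cacheVersion name from the question:

const putInCache = async (request, response) => {
  // open (or create) the cache and store the response under the request key
  const cache = await caches.open(cacheVersion);
  await cache.put(request, response);
};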
I am wondering if someone might be able to help me figure out how to pass a POST body to another endpoint with Cloudflare Workers?
I am trying to forward the incoming POST request's body to url.
const url = 'https://webhook.site/#!/b2f75ce2-7b9e-479a-b6f0-8934a89a3f3d'
const body = {
  results: ['default data to send'],
  errors: null,
  msg: 'I sent this to the fetch',
}
/**
 * gatherResponse awaits and returns a response body as a string.
 * Use await gatherResponse(..) in an async function to get the response body.
 * @param {Response} response
 */
async function gatherResponse(response) {
  const { headers } = response
  const contentType = headers.get('content-type') || ''
  if (contentType.includes('application/json')) {
    return JSON.stringify(await response.json())
  } else if (contentType.includes('application/text')) {
    return response.text()
  } else if (contentType.includes('text/html')) {
    return response.text()
  } else {
    return response.text()
  }
}
async function handleRequest() {
  const init = {
    body: JSON.stringify(body),
    method: 'POST',
    headers: {
      'content-type': 'application/json;charset=UTF-8',
    },
  }
  const response = await fetch(url, init)
  const results = await gatherResponse(response)
  return new Response(results, init)
}

addEventListener('fetch', (event) => {
  return event.respondWith(handleRequest())
})
I created a worker at https://tight-art-0743.ctohm.workers.dev/, which basically forwards your POST request's body to a public requestbin. You can check what it is receiving at: https://requestbin.com/r/en5k768mcp4x9/24tqhPJw86mt2WjKRMbmt75FMH9
addEventListener("fetch", (event) => {
event.respondWith(
handleRequest(event.request).catch(
(err) => new Response(err.stack, { status: 500 })
)
);
});
async function handleRequest(request) {
let {method,headers}=request,
url=new URL(request.url)
// methods other than POST will return early
if(method!=='POST') return new Response(`Your request method was ${method}`);
const forwardRequest=new Request("https://en5k768mcp4x9.x.pipedream.net/", request)
forwardRequest.headers.set('X-Custom-Header','hey!')
return fetch(forwardRequest)
}
You can see it working with a simple curl request:
curl --location --request POST 'https://tight-art-0743.ctohm.workers.dev/' \
--header 'Content-Type: application/json' \
--data-raw '{"environment": {"name": "Sample Environment Name (required)"}}'
Two things worth noting in the worker's code:

- I'm passing the original request as the init parameter, through which the original headers and body are transparently forwarded to the requestbin, while still allowing for some extra header manipulation if needed.
- In this example I'm not actually doing anything with the request body, so there's no need to await it. You just connect the incoming and outgoing streams and let them deal with each other.

Another example: let's add a /csv route. Requests starting with /csv will not forward your POST body. Instead they will download a remote CSV attachment and POST it to the requestbin. Again, we aren't waiting for the actual CSV contents; we pass a handle to the response body to the forwarding request:
async function handleRequest(request) {
  let { method, headers } = request,
    url = new URL(request.url)
  if (method !== 'POST') return new Response(`Your request method was ${method}`);
  const forwardRequest = new Request("https://en5k768mcp4x9.x.pipedream.net/", request)
  if (url.pathname.includes('/csv')) {
    const remoteSource = `https://cdn.wsform.com/wp-content/uploads/2018/09/country_full.csv`,
      remoteResponse = await fetch(remoteSource)
    return fetch(forwardRequest, { body: remoteResponse.body })
  }
  forwardRequest.headers.set('X-Custom-Header', 'hey!')
  return fetch(forwardRequest)
}
While your code should theoretically work, the fact that you're unwrapping the response means your worker could be aborted for hitting CPU, memory, or time limits. With the streams-based approach, on the other hand, your worker's execution finishes as soon as it returns the forwarding fetch; even if the outgoing POST is still running, it isn't subject to CPU or time limits.
I make an HTTP call with Axios that is supposed to respond with 200 (tested in Postman).
But the response never reaches the try/catch/finally logic, and execution stops unexpectedly.
The code looks like this:
(async () => {
  const axios = require('axios');
  const url = ''; // The URL responds with an HTML page
  const cookies = ''; // The cookies are checked with Postman
  const config = {
    headers: {
      Cookie: cookies
    }
  };
  try {
    let res = await axios.get(url, config);
    console.log('Response received.');
    console.log(res);
  } catch (err) {
    console.log('Error happened.');
    console.log(err);
  } finally {
    console.log('Finally block.');
  }
  console.log('End of execution.');
})()
None of the console.log calls run. I even tried debugger and other actions not related to writing to stdout, but they aren't called either.
The exit status of the process is 0.
I just found the solution: my program was exiting because my request was never resolved.
I needed to set the Connection header to keep-alive, and now it works properly.
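For example, reusing the config object from the question, the fix might look like this; the Connection header is the only change:

const config = {
  headers: {
    Cookie: cookies,
    // without this the request never resolved and the process
    // exited before any console.log could run
    Connection: 'keep-alive',
  },
};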
When I call the URL, an undefined number of requests is sent.
Now I am trying to find out whether one of those requests contains a certain payload.
cy.server();
cy.visit(url);
cy.route({
  method: 'POST',
  url: '**/t/e/**',
}).as('xhrRequest');
I have found a similar approach in How to capture all API calls in cypress? so far.
The problem there is that a fixed number of API calls is assumed:
cy.wait(Array(60).fill('@xhrRequest'), { timeout: 30000 }).then((xhrs) => {
  xhrs.forEach((res) => {
    expect(res.status).not.to.be.null
  })
})
How can I intercept all requests, and fail my test if not a single one of them contains the payload?
I already wrote something like this in Puppeteer:

let hasSpecialRequest = false;
page.on('request', request => {
  if (isSpecialRequest(request)) {
    hasSpecialRequest = true;
  }
});
await page.setRequestInterception(true);

expect(hasSpecialRequest).to.equal(true);

The system checks whether each request is one of the special requests and sets the variable accordingly. I'm trying to recreate something like this with Cypress.
You might consider using cy.exec to run a script in another language and return a status from the subprocess.
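A minimal sketch of that idea; check_requests.py is hypothetical and would have to do its own traffic capture, exiting non-zero when the payload never shows up:

// check_requests.py is a hypothetical script, not part of the original suggestion
cy.exec('python check_requests.py', { failOnNonZeroExit: true });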
I may have misunderstood the problem, but since I got here through my Google expedition, maybe this will help someone who had my problem, and possibly you.
At first I used cy.wait('@alias'), but I could never retrieve all the responses (only one response was shown, and I couldn't figure out how to access ALL of them). So I ended up immediately storing the responses in another array to access within the test.
let xhrRequests;

function getXhrRequests() {
  xhrRequests = [];
  cy.intercept('GET', '**', (res) => {
    xhrRequests.push(res);
  });
  return xhrRequests;
}

describe('Some', function () {
  it('Thing 1', () => {
    let thing1 = getXhrRequests();
    cy.visit('http://some.site');
    thing1.forEach((res) => {
      expect(res.status).not.to.be.null;
    });
  });

  it('Thing 2', () => {
    let thing2 = getXhrRequests();
    cy.visit('http://some.site/2');
    thing2.forEach((res) => {
      expect(res.status).not.to.be.null;
    });
  });
});
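To tie this back to the original question (failing the test when no request carries the payload), here is a hedged sketch along the same lines; the '**/t/e/**' pattern comes from the question, and PAYLOAD_MARKER is a placeholder for whatever string you are searching for:

it('sends the special request at least once', () => {
  let hasSpecialRequest = false;
  cy.intercept('POST', '**/t/e/**', (req) => {
    // req.body may be a string or a parsed object depending on the request
    const body = typeof req.body === 'string' ? req.body : JSON.stringify(req.body);
    if (body.includes('PAYLOAD_MARKER')) { // placeholder payload
      hasSpecialRequest = true;
    }
    req.continue();
  });
  cy.visit(url);
  // a .should() callback retries until it passes or times out
  cy.wrap(null).should(() => {
    expect(hasSpecialRequest).to.equal(true);
  });
});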
I'm using a service worker for working offline, so for each fetch request I store the response in the cache.
Now, I'd like the service worker to make a request itself, and store the result as well for next time.
The problem is that when I use fetch(myUrl).then..., the fetch listener (self.addEventListener('fetch', function(e)...) in the service worker doesn't catch it.
I wouldn't like to duplicate code. Any ideas?
The fetch listener is:
self.addEventListener('fetch', function(e) {
  // e.respondWith responds to the fetch event
  e.respondWith(
    // Check in cache for the request being made
    caches.match(e.request)
      .then(function(response) {
        // If the request is in the cache
        if (response) {
          console.log("[ServiceWorker] Found in Cache", e.request.url, response);
          // Return the cached version
          return response;
        }
        // If the request is NOT in the cache, fetch and cache
        var requestClone = e.request.clone();
        return fetch(requestClone)
          .then(function(response) {
            if (!response) {
              console.log("[ServiceWorker] No response from fetch")
              return response;
            }
            var responseClone = response.clone();
            // Open the cache
            caches.open(cacheName).then(function(cache) {
              // Put the fetched response in the cache
              cache.put(e.request, responseClone);
              console.log('[ServiceWorker] New Data Cached', e.request.url);
              // Return the response
              return response;
            }); // end caches.open
            // return the fresh response (not the cached one)
            return response;
          })
          .catch(function(err) {
            console.log('[ServiceWorker] Error Fetching & Caching New Data', err);
          });
      }) // end caches.match(e.request)
      .catch(function(e) {
        // this is - if we still don't have this in the cache!
        console.log("[ServiceWorker] ERROR WITH THIS MATCH !!!", e, arguments)
      }) // end of caches.match
  ); // end e.respondWith
});
Since I can't comment to ask for more specific details, and your code seems right to me, my guesses are:

Your service worker is not registered or not running. You can make sure it is running by checking the Application tab of your browser's inspector.
You assume it is not working because the messages are not being logged to the console. Service workers run in a different environment from your page, so their messages go to a different console, which you can open by clicking the worker's inspect link in that same Application tab.
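As a quick sanity check, you can also query the registration from the page's own console; this is just a diagnostic sketch:

// Run this in the page's console: lists registrations and the active controller
navigator.serviceWorker.getRegistrations().then(function(regs) {
  console.log('registrations:', regs.length);
  console.log('controlling this page:', navigator.serviceWorker.controller);
});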
The Cache then Network strategy is probably what you are looking for:
https://developers.google.com/web/fundamentals/instant-and-offline/offline-cookbook/#cache-then-network
var networkDataReceived = false;

startSpinner();

// fetch fresh data
var networkUpdate = fetch('/data.json').then(function(response) {
  return response.json();
}).then(function(data) {
  networkDataReceived = true;
  updatePage(data);
});

// fetch cached data
caches.match('/data.json').then(function(response) {
  if (!response) throw Error("No data");
  return response.json();
}).then(function(data) {
  // don't overwrite newer network data
  if (!networkDataReceived) {
    updatePage(data);
  }
}).catch(function() {
  // we didn't get cached data, the network is our last hope:
  return networkUpdate;
}).catch(showErrorMessage).then(stopSpinner);
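The snippet above is page-side code; it pairs with a service-worker-side handler that refreshes the cache on every network response. A minimal sketch of that half, where the 'mysite-dynamic' cache name is an assumption:

self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.open('mysite-dynamic').then(function(cache) {
      return fetch(event.request).then(function(response) {
        // update the cache with a fresh copy, then serve the network response
        cache.put(event.request, response.clone());
        return response;
      });
    })
  );
});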