Use ServiceWorker cache only when offline - javascript

I'm trying to integrate service workers into my app, but the service worker tries to retrieve cached content even when online, and I want it to prefer the network in those situations. How can I do this? Below is the code I have now, but I don't believe it is working. SW install code is omitted for brevity.
var CACHE_NAME = 'my-cache-v1';
var urlsToCache = [
/* my cached file list */
];
self.addEventListener('install', function(event) {
// Perform install steps
event.waitUntil(
caches.open(CACHE_NAME)
.then(function(cache) {
console.log('Opened cache');
return cache.addAll(urlsToCache);
})
);
});
/* request is being made */
self.addEventListener('fetch', function(event) {
event.respondWith(
//first try to run the request normally
fetch(event.request).catch(function() {
//catch errors by attempting to match in cache
return caches.match(event.request).then(function(response) {
// Cache hit - return response
if (response) {
return response;
}
});
})
);
});
This seems to lead to warnings like The FetchEvent for "[url]" resulted in a network error response: an object that was not a Response was passed to respondWith(). I'm new to service workers, so apologies for any mistaken terminology or bad practices; I'd welcome any tips. Thank you!

Without testing this out, my guess is that you're not resolving respondWith() correctly in the case where there is no cache match. According to MDN, the code passed to respondWith() is supposed to "resolve by returning a Response or network error to Fetch." So why not try just doing this:
self.addEventListener('fetch', function(event) {
event.respondWith(
fetch(event.request).catch(function() {
return caches.match(event.request);
})
);
});

Why don't you open the cache for your fetch event?
I think the process of a service worker is:
Open your cache
Check whether the request matches a response in your cache
Then return that response
OR (if the response is not in the cache):
Fetch the request over the network
Clone the response from the network
Put the request and the cloned response in your cache for future use
I would write:
self.addEventListener('fetch', event => {
  event.respondWith(
    caches.open(CACHE_NAME).then(cache => {
      return cache.match(event.request).then(response => {
        // Return the cached response if there is one
        return response || fetch(event.request).then(networkResponse => {
          // Store a clone for future use, then return the network response
          cache.put(event.request, networkResponse.clone());
          return networkResponse;
        });
      });
    })
  );
});

event.respondWith() expects a promise that resolves to a Response. So in the case of a cache miss you still need to return a Response, but above you are returning nothing. I'd also try the cache first, then fetch, but in any case, as a last resort, you can always create a synthetic Response, for example something like this:
return new Response("Network error happened", {"status" : 408, "headers" : {"Content-Type" : "text/plain"}});
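Putting the pieces together, a minimal sketch, assuming you still want the network-first behaviour from the question: try the network, fall back to the cache, and return a synthetic Response only when both fail:
self.addEventListener('fetch', function(event) {
  event.respondWith(
    // Prefer the network while online
    fetch(event.request).catch(function() {
      // Network failed: try the cache
      return caches.match(event.request).then(function(response) {
        if (response) {
          return response;
        }
        // Last resort: a synthetic Response, so respondWith() always receives a Response
        return new Response("Network error happened", {
          "status": 408,
          "headers": {"Content-Type": "text/plain"}
        });
      });
    })
  );
});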

Related

Service worker does not serve cached resources

Although I see similar questions regarding the subject, they are not the same. I have a PWA that is basically a simple form that users must fill out. The service worker caches the resources and serves them to the app, as usual. The strategy is 'cache first, then network'. All is OK when online, BUT in offline mode the cached resources are not used by the app. I mean, in spite of the fact that (as you can see) the cache contains the resources (fetch requests) the app needs, it still tries to fetch from the web, and because it is offline the fetch fails and the app crashes. So, the code lines ...
caches.match(e.request)
.then( res => {
if (res ){
return res;
}
...
are not working. My question: why?
I will appreciate your help/comments.
You should add some more context and code to let others better understand your situation.
Do you serve data from the cache as in the following example?
self.addEventListener('fetch', function(event) {
event.respondWith(
caches.open('mysite-dynamic').then(function(cache) {
return cache.match(event.request).then(function (response) {
return response || fetch(event.request).then(function(response) {
cache.put(event.request, response.clone());
return response;
});
});
})
);
});

How to handle 206 responses in Firefox service workers

While testing service workers for a project and using this example from Google:
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Names of the two caches used in this version of the service worker.
// Change to v2, etc. when you update any of the local resources, which will
// in turn trigger the install event again.
const PRECACHE = 'precache-v1';
const RUNTIME = 'runtime';
// A list of local resources we always want to be cached.
const PRECACHE_URLS = [
'index.html',
'./', // Alias for index.html
'styles.css',
'../../styles/main.css',
'demo.js'
];
// The install handler takes care of precaching the resources we always need.
self.addEventListener('install', event => {
event.waitUntil(
caches.open(PRECACHE)
.then(cache => cache.addAll(PRECACHE_URLS))
.then(self.skipWaiting())
);
});
// The activate handler takes care of cleaning up old caches.
self.addEventListener('activate', event => {
const currentCaches = [PRECACHE, RUNTIME];
event.waitUntil(
caches.keys().then(cacheNames => {
return cacheNames.filter(cacheName => !currentCaches.includes(cacheName));
}).then(cachesToDelete => {
return Promise.all(cachesToDelete.map(cacheToDelete => {
return caches.delete(cacheToDelete);
}));
}).then(() => self.clients.claim())
);
});
// The fetch handler serves responses for same-origin resources from a cache.
// If no response is found, it populates the runtime cache with the response
// from the network before returning it to the page.
self.addEventListener('fetch', event => {
// Skip cross-origin requests, like those for Google Analytics.
if (event.request.url.startsWith(self.location.origin)) {
event.respondWith(
caches.match(event.request).then(cachedResponse => {
if (cachedResponse) {
return cachedResponse;
}
return caches.open(RUNTIME).then(cache => {
return fetch(event.request).then(response => {
// Put a copy of the response in the runtime cache.
return cache.put(event.request, response.clone()).then(() => {
return response;
});
});
});
})
);
}
});
source: https://github.com/GoogleChrome/samples/blob/gh-pages/service-worker/basic/service-worker.js
I discovered that Firefox (in contrast to Safari and Chrome) throws errors within event.waitUntil() as well as event.respondWith() if something does not work with the fetch requests (even if it's just a 206 Partial Content response):
Service worker event waitUntil() was passed a promise that rejected
with 'TypeError: Cache got basic response with bad status 206 while
trying to add request
That behaviour breaks the installer. If I add a .catch() to the installer like this
self.addEventListener('install', event => {
event.waitUntil(
caches.open(PRECACHE)
.then(cache => cache.addAll(PRECACHE_URLS))
.then(self.skipWaiting())
.catch(function(err){
console.log(err);
self.skipWaiting();
})
);
});
I presume the first 206 makes the precaching stop (?).
Also, after that the SW gets installed, but once in a while I get a
Service worker event respondWith() was passed a promise that rejected
with 'TypeError: Cache got basic response with bad status 206 while
trying to add request
and even if that does not happen, if I try to open a link to the URL that threw the 206 error during installation/precaching, I get:
Failed to load ‘https://xxxx/yyyyy.mp3’. A
ServiceWorker passed a promise to FetchEvent.respondWith() that
rejected with ‘TypeError: Cache got basic response with bad status 206
while trying to add request
https://xxxx/yyyyy.mp3’.
How can I handle this kind of error properly? Catching like above doesn't make much sense, since it breaks the forced precaching. And even if that were acceptable, it seems to interfere with every request happening from then on and might cause trouble later.
One half of the problem I could solve by moving the return statement from within the cache.put() callback outside of it:
self.addEventListener('fetch', event => {
// Skip cross-origin requests, like those for Google Analytics.
if (event.request.url.startsWith(self.location.origin)) {
event.respondWith(
caches.match(event.request).then(cachedResponse => {
if (cachedResponse) {
return cachedResponse;
}
return caches.open(RUNTIME).then(cache => {
return fetch(event.request).then(response => {
// Put a copy of the response in the runtime cache.
cache.put(event.request, response.clone()).then(() => {
console.log("logged a file into RUNTIME:");
console.log(response);
});
return response; // and return anyhow whatever came back
});
});
})
);
}
});
This way the SW does not wait for the cache.put() to succeed, and yet the response gets cached most of the time.
This solves the most urgent issue, but the remaining problems are:
a) forced precaching still gets cancelled by 206 responses (see the sketch below)
b) if I want to make sure that requests are cached at runtime, I would still need to write some retry() function or similar.
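One way to address both points might be the following untested sketch: precache the URLs one by one so a single failing resource (for example one answered with a 206) doesn't reject the whole addAll() and cancel the install, and guard the runtime cache.put() so only complete 200 responses are stored, since the Cache API rejects partial 206 responses:
self.addEventListener('install', event => {
  event.waitUntil(
    caches.open(PRECACHE).then(cache =>
      // Add URLs individually; one failure no longer aborts precaching
      Promise.all(
        PRECACHE_URLS.map(url =>
          cache.add(url).catch(err => console.log('Skipped precaching', url, err))
        )
      )
    ).then(() => self.skipWaiting())
  );
});
self.addEventListener('fetch', event => {
  // Skip cross-origin requests
  if (!event.request.url.startsWith(self.location.origin)) return;
  event.respondWith(
    caches.match(event.request).then(cachedResponse => {
      if (cachedResponse) {
        return cachedResponse;
      }
      return caches.open(RUNTIME).then(cache =>
        fetch(event.request).then(response => {
          // Only complete responses are safe to store; a 206 would make cache.put() reject
          if (response.status === 200) {
            cache.put(event.request, response.clone());
          }
          return response;
        })
      );
    })
  );
});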

How to fallback to browser's default fetch handling within event.respondWith()?

Within the service worker my fetch handler looks like this:
self.addEventListener('fetch', function (event) {
event.respondWith(
caches.match(event.request).then(function (response) {
return response || fetch(event.request); //<-- is this the browser's default fetch handling?
})
);
});
The method event.respondWith() forces me to handle all requests myself, including XHR requests, which is not what I want to do. I only want the cached resources to be returned if available and let the browser handle the rest using its default fetch handling.
I have two issues with fetch(event.request):
Only when DevTools is open does it produce an error while fetching the initial URL that is visible in the address bar, https://test.de/x/#/page. It happens both on the initial install and on every reload:
Uncaught (in promise) TypeError: Failed to execute 'fetch' on 'ServiceWorkerGlobalScope': 'only-if-cached' can be set only with 'same-origin' mode`
and I don't understand why, because I am not setting anything
It seems to violate the HTTP protocol because it tries to request a URL with an anchor inside:
Console: {"lineNumber":0, "message":"The FetchEvent for
\"https://test.de/x/#/page\" resulted in a network error
response: the promise was rejected.", "message_level":2, "sourceIdentifier":1, "sourceURL":""}`
How does fetch() differ from the browser's default fetch handling and are those differences the cause for those errors?
Additional information and code:
My application also leverages the good old AppCache in parallel with the service worker (for backwards compatibility). I am not sure if the AppCache interferes with the service worker installation on the initial page load. The rest of the code is pretty straightforward:
My index.html at https://test.de/x/#/page uses appcache and a base-href:
<html manifest="appcache" lang="de">
<head>
<base href="/x/"/>
</head>
...
Service Worker registration within the body script
window.addEventListener('load', function () {
  navigator.serviceWorker.register('/x/sw.js');
});
Install and activate event
let MY_CACHE_ID = 'myCache_v1';
let urlsToCache = ['js/main.js'];
self.addEventListener('install', function (event) {
event.waitUntil(
caches.open(MY_CACHE_ID)
.then(function (cache) {
return cache.addAll(
urlsToCache.map(url => new Request(url,
{credentials:'include'}))
)
})
);
});
self.addEventListener('activate', function (event) {
//delete old caches
let cacheWhitelist = [MY_CACHE_ID];
event.waitUntil(
caches.keys().then(function (cacheNames) {
return Promise.all(
cacheNames.map(function (cacheName) {
if (cacheWhitelist.indexOf(cacheName) === -1) {
return caches.delete(cacheName);
}
})
);
})
);
});
fetch(event.request) should be really close to the default. (You can get the actual default by not calling respondWith() at all. It should mostly not be observable, but is with CSP and some referrer bits.)
Given that, I'm not sure how you're ending up with 1. That should not be possible. Unfortunately, you've not given enough information to debug what is going on.
As for 2, it passes the fragment on to the service worker, but that won't be included in the eventual network request. That matches how Fetch is defined and is done that way to give the service worker a bit of additional context that might be useful sometimes.
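A minimal sketch of that idea, assuming you only want the service worker to handle same-origin GET requests (the heuristic is up to you): return early without calling respondWith(), and the browser's default fetch handling takes over for everything else:
self.addEventListener('fetch', function (event) {
  // Requests the service worker doesn't care about fall through to the
  // browser's default handling because respondWith() is never called
  if (event.request.method !== 'GET' ||
      !event.request.url.startsWith(self.location.origin)) {
    return;
  }
  event.respondWith(
    caches.match(event.request).then(function (response) {
      return response || fetch(event.request);
    })
  );
});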

Show offline cache when server is unreachable

Is it possible to show an offline cache of my website when the server is down?
All the examples I can find regarding offline pages have to do with the client being offline. What I need is to show the user a cached version of my site if the server can't be reached.
I've read about the Cache Manifest in HTML5, but it's being removed and it causes too many problems.
What can be done without using any other load-balancing servers and such?
I recently learned that with the Fetch API and service workers it's dead simple:
First, you register the Service worker:
if (!navigator.serviceWorker) return;
navigator.serviceWorker.register('/sw.js')
Then configure it to cache what's needed:
self.addEventListener('install', function(event) {
event.waitUntil(
caches.open(staticCacheName).then(function(cache) {
return cache.addAll([
'/',
'js/main.js',
'css/main.css',
'imgs/icon.png',
]);
})
);
});
And use the Fetch API to fall back to cached pieces if there is no response from the call:
self.addEventListener('fetch', function(event) {
event.respondWith(
caches.match(event.request).then(function(response) {
return response || fetch(event.request);
})
);
});
If you need the cached version only when the server is down, try something like:
self.addEventListener('fetch', function(event) {
  event.respondWith(
    fetch(event.request).then(function(response) {
      // Server reachable but returned an error: fall back to the cache
      if (response.status !== 200) {
        return caches.match(event.request).then(function(cached) {
          return cached || response;
        });
      }
      return response;
    }).catch(function(error) {
      // Network failure (server unreachable): serve from the cache
      console.error('Fetching failed:', error);
      return caches.match(event.request);
    })
  );
});
P.S. Using the Fetch API seems a much nicer way than implementing the old and nasty XMLHttpRequest.

Only in Chrome (Service Worker): '... a redirected response was used for a request whose redirect mode is not "follow" '

When I refresh (or go offline) in Chrome, I get "This site can't be reached" and the following logged to the console: The FetchEvent for "http://localhost:8111/survey/174/deployment/193/answer/offline/attendee/240/" resulted in a network error response: a redirected response was used for a request whose redirect mode is not "follow". When I refresh in Firefox, everything works fine. Could someone explain why this is happening?
Here is my simplified SW.
importScripts("/static/js/libs/idb.js")
var CACHE_NAME = "upshot-cache-version3"
var urlsToCache = [...]
self.addEventListener("install", event => {
event.waitUntil(
caches
.open(CACHE_NAME)
.then(cache => {
urlsToCache.map(url => cache.add(url))
})
)
})
self.addEventListener("activate", event => {
clients.claim()
})
self.addEventListener('fetch', event => {
event.respondWith(
caches
.match(event.request)
.then(response => {
if (response) {
return response
}
var fetchRequest = event.request.clone()
return fetch(fetchRequest).then(response => {
if (!response || response.status !== 200 || response.type !== 'basic') {
return response
}
var responseToCache = response.clone()
caches.open(CACHE_NAME).then(cache => cache.put(event.request, responseToCache))
return response
})
})
)
})
This is due to a (relatively) recent change in the security restrictions around what kind of responses can be used to satisfy navigations. It should apply to all browsers that support service workers (i.e. both Chrome and Firefox today), but it's possible that you're testing with a version of Firefox that's out of date.
The background on what changed can be found in this issue tracker entry, and there's also more background on the decision that led to the underlying specification.
In terms of modifying your service worker to handle the security restriction, if you're currently responding to navigation requests for certain URLs with a HTTP 30x redirect to a different URL, you'll need to take care not to just store that redirected response directly in the cache.
You can tell whether a given response was redirected by checking whether response.redirected is true, and if so, use code along the lines of this (adapted from the Workbox project) to create a "clean" copy of the response that could then be stored in the cache:
function cleanResponse(response) {
const clonedResponse = response.clone();
// Not all browsers support the Response.body stream, so fall back to reading
// the entire body into memory as a blob.
const bodyPromise = 'body' in clonedResponse ?
Promise.resolve(clonedResponse.body) :
clonedResponse.blob();
return bodyPromise.then((body) => {
// new Response() is happy when passed either a stream or a Blob.
return new Response(body, {
headers: clonedResponse.headers,
status: clonedResponse.status,
statusText: clonedResponse.statusText,
});
});
}
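A possible usage sketch, assuming a fetch handler like the one in the question: check response.redirected before caching and store the cleaned copy instead of the redirected one:
self.addEventListener('fetch', event => {
  event.respondWith(
    caches.match(event.request).then(response => {
      if (response) {
        return response
      }
      return fetch(event.request.clone()).then(response => {
        if (!response || response.status !== 200 || response.type !== 'basic') {
          return response
        }
        // Strip the redirect flag before caching so the stored response can
        // safely satisfy future navigations whose redirect mode is not "follow"
        const storable = response.redirected
          ? cleanResponse(response)
          : Promise.resolve(response.clone())
        storable.then(copy =>
          caches.open(CACHE_NAME).then(cache => cache.put(event.request, copy))
        )
        return response
      })
    })
  )
})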
