I have a service worker that is supposed to cache an offline.html page that is displayed if the client has no network connection. However, the worker sometimes believes the browser is offline even when it is not, i.e. navigator.onLine === false despite a working connection. This means the user may get offline.html instead of the actual content even while online, which is obviously something I'd like to avoid.
This is how I register the service worker in my main.js:
// Install service worker for offline use and caching
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/service-worker.js', {scope: '/'});
}
My current service-worker.js:
const OFFLINE_URL = '/mysite/offline';
const CACHE_NAME = 'mysite-static-v1';

self.addEventListener('install', (event) => {
  event.waitUntil(
    // Cache the offline page when installing the service worker
    fetch(OFFLINE_URL, { credentials: 'include' }).then(response =>
      caches.open(CACHE_NAME).then(cache => cache.put(OFFLINE_URL, response)),
    ),
  );
});

self.addEventListener('fetch', (event) => {
  const requestURL = new URL(event.request.url);

  if (requestURL.origin === location.origin) {
    // Load static assets from cache if network is down
    if (/\.(css|js|woff|woff2|ttf|eot|svg)$/.test(requestURL.pathname)) {
      event.respondWith(
        caches.open(CACHE_NAME).then(cache =>
          caches.match(event.request).then((result) => {
            if (navigator.onLine === false) {
              // We are offline so return the cached version immediately, null or not.
              return result;
            }
            // We are online so let's run the request to make sure our content
            // is up-to-date.
            return fetch(event.request).then((response) => {
              // Save the result to cache for later use.
              cache.put(event.request, response.clone());
              return response;
            });
          }),
        ),
      );
      return;
    }
  }

  if (event.request.mode === 'navigate' && navigator.onLine === false) {
    // Uh-oh, we navigated to a page while offline. Let's show our default page.
    event.respondWith(caches.match(OFFLINE_URL));
    return;
  }

  // Passthrough for everything else
  event.respondWith(fetch(event.request));
});
What am I doing wrong?
navigator.onLine and the related events can be useful when you want to update your UI to indicate that you're offline and, for instance, only show content that exists in a cache.
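For that kind of UI update, a pair of window event listeners in page code (not in the service worker) is usually all you need; a minimal sketch:

// Page code: toggle an "offline" indicator as connectivity changes.
window.addEventListener('online', () => document.body.classList.remove('offline'));
window.addEventListener('offline', () => document.body.classList.add('offline'));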
But I'd avoid writing service worker logic that relies on checking navigator.onLine. Instead, attempt to make a fetch() unconditionally, and if it fails, provide a backup response. This will ensure that your web app behaves as expected regardless of whether the fetch() fails due to being offline, due to lie-fi, or due to your web server experiencing issues.
// Other fetch handler code...

if (event.request.mode === 'navigate') {
  return event.respondWith(
    fetch(event.request).catch(() => caches.match(OFFLINE_URL))
  );
}

// Other fetch handler code...
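The static-asset branch of your worker can be restructured the same way: hit the network first and fall back to the cache only when the fetch() rejects, with no navigator.onLine check at all. A rough sketch reusing your CACHE_NAME and file-extension regex:

if (/\.(css|js|woff|woff2|ttf|eot|svg)$/.test(requestURL.pathname)) {
  event.respondWith(
    caches.open(CACHE_NAME).then(cache =>
      fetch(event.request)
        .then((response) => {
          // Network succeeded: refresh the cache and return the live response.
          cache.put(event.request, response.clone());
          return response;
        })
        // Network failed (offline, lie-fi, server trouble): use the cached copy.
        .catch(() => cache.match(event.request)),
    ),
  );
  return;
}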
I'm trying to get just a simple working example of this going, but I feel like I'm misunderstanding something.
My page is dynamically generated (Django), but all I want is to register a service worker to have a fallback page if the user is offline anywhere in the app. I'm testing this on http://localhost:8000, so maybe this is keeping it from working?
This is what I based my code on; I've copied it 99%, aside from the location of the offline HTML file, which is correctly getting cached, so I can verify that part works.
https://googlechrome.github.io/samples/service-worker/custom-offline-page/
The SW is registered at the bottom of my HTML's body:
<script>
  if ('serviceWorker' in navigator) {
    navigator.serviceWorker.register('/static/js/service-worker.js');
  }
</script>
For /static/js/service-worker.js:
const OFFLINE_VERSION = 1;
const CACHE_NAME = 'offline';
// Customize this with a different URL if needed.
const OFFLINE_URL = '/static/offline/offline.html';

self.addEventListener('install', (event) => {
  event.waitUntil((async () => {
    const cache = await caches.open(CACHE_NAME);
    // Setting {cache: 'reload'} in the new request will ensure that the response
    // isn't fulfilled from the HTTP cache; i.e., it will be from the network.
    await cache.add(new Request(OFFLINE_URL, {cache: 'reload'}));
  })());
});

self.addEventListener('activate', (event) => {
  event.waitUntil((async () => {
    // Enable navigation preload if it's supported.
    // See https://developers.google.com/web/updates/2017/02/navigation-preload
    if ('navigationPreload' in self.registration) {
      await self.registration.navigationPreload.enable();
    }
  })());

  // Tell the active service worker to take control of the page immediately.
  self.clients.claim();
});

self.addEventListener('fetch', (event) => {
  // We only want to call event.respondWith() if this is a navigation request
  // for an HTML page.
  if (event.request.mode === 'navigate') {
    event.respondWith((async () => {
      try {
        // First, try to use the navigation preload response if it's supported.
        const preloadResponse = await event.preloadResponse;
        if (preloadResponse) {
          return preloadResponse;
        }

        const networkResponse = await fetch(event.request);
        return networkResponse;
      } catch (error) {
        // catch is only triggered if an exception is thrown, which is likely
        // due to a network error.
        // If fetch() returns a valid HTTP response with a response code in
        // the 4xx or 5xx range, the catch() will NOT be called.
        console.log('Fetch failed; returning offline page instead.', error);

        const cache = await caches.open(CACHE_NAME);
        const cachedResponse = await cache.match(OFFLINE_URL);
        return cachedResponse;
      }
    })());
  }

  // If our if() condition is false, then this fetch handler won't intercept the
  // request. If there are any other fetch handlers registered, they will get a
  // chance to call event.respondWith(). If no fetch handlers call
  // event.respondWith(), the request will be handled by the browser as if there
  // were no service worker involvement.
});
The worker successfully installs and activates. The offline.html page is successfully cached and I can verify this in Chrome Inspector -> Application -> Service Workers. I can also verify it's the correct service-worker.js file and not an old one.
If I switch Chrome to "Offline" and refresh the page, I still get the standard "No Internet" page. It also doesn't look like the "fetch" event happens on any normal page load, since a console.log in the handler never fires.
Is the sample code I'm using outdated? Is this a limitation of trying this on Localhost? What am I doing wrong? Thank you.
I am trying to set up my website to have a fallback page when it is loaded without an internet connection. To do that, I am following this guide on web.dev: "Create an offline fallback page"
I modified the example ServiceWorker in the article to fit my purposes, including being able to serve external CSS and images in the fallback offline page:
// Incrementing OFFLINE_VERSION will kick off the install event and force
// previously cached resources to be updated from the network.
const OFFLINE_VERSION = 1;
const CACHE_NAME = "offline";
// Customize this with a different URL if needed.
const OFFLINE_URL = "offline.html";

self.addEventListener("install", (event) => {
  event.waitUntil(
    (async () => {
      const cache = await caches.open(CACHE_NAME);
      // Setting {cache: 'reload'} in the new request will ensure that the response
      // isn't fulfilled from the HTTP cache; i.e., it will be from the network.
      await cache.add(new Request(OFFLINE_URL, { cache: "reload" }));
      await cache.add(new Request("offline.css", { cache: "reload" }));
      await cache.add(new Request("logo.png", { cache: "reload" }));
      await cache.add(new Request("unsupportedCloud.svg", { cache: "reload" }));
    })()
  );
});

self.addEventListener("activate", (event) => {
  // Tell the active service worker to take control of the page immediately.
  self.clients.claim();
});

self.addEventListener("fetch", (event) => {
  // We only want to call event.respondWith() if this is a navigation request
  // for an HTML page.
  if (event.request.mode === "navigate") {
    if (event.request.url.match(/SignOut/)) {
      return false;
    }
    event.respondWith(
      (async () => {
        try {
          const networkResponse = await fetch(event.request);
          return networkResponse;
        } catch (error) {
          // catch is only triggered if an exception is thrown, which is likely
          // due to a network error.
          // If fetch() returns a valid HTTP response with a response code in
          // the 4xx or 5xx range, the catch() will NOT be called.
          console.log("Fetch failed; returning offline page instead.", error);

          const cache = await caches.open(CACHE_NAME);
          const cachedResponse = await cache.match(OFFLINE_URL);
          return cachedResponse;
        }
      })()
    );
  }
});
However, when the offline.html page loads, it is unable to load the images and the CSS; the images fail with a 404 error, and the request for the CSS doesn't even show up in the Network tab of the browser dev console.
I would expect the images and CSS to be fetched from the ServiceWorker cache, but it seems that neither is.
Am I missing something on how ServiceWorkers cache requests or how they fetch them? Or on how to design the offline fallback page to work?
Turns out there were a few reasons why the assets were not being found.
The first reason is that when the assets were saved to the cache, they were saved under the full path where they live alongside the Service Worker file.
So the paths that were saved were along the lines of static/PWA/[offline.css, logo.png, unsupportedCloud.svg], but the page that requested them was served from the root. In offline.html I had to reference them accordingly: <img src="static/PWA/unsupportedCloud.svg" class="unsupported-cloud" />.
The second reason is that the Service Worker only handled fetch events whose mode was "navigate". In my example you can see I had written if (event.request.mode === "navigate") {...}, so the cache we set up was only consulted for navigation events, which never catches the fetch events for assets. To fix that, I added a new check for the "no-cors" request mode: else if (event.request.mode === "no-cors") {...}.
These two fixes let me get assets from the offline cache that I set up on Service Worker installation. With some other minor fixes, this resolved my question!
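The final handler isn't shown above, but based on the fix described, the extra branch might look roughly like this (cache-first is one plausible choice for these precached assets):

self.addEventListener("fetch", (event) => {
  if (event.request.mode === "navigate") {
    // ... network-first navigation handling, as in the original worker ...
  } else if (event.request.mode === "no-cors") {
    // Asset requests (CSS, images, SVG): try the offline cache first and
    // fall back to the network if the file was never precached.
    event.respondWith(
      (async () => {
        const cache = await caches.open(CACHE_NAME);
        const cachedResponse = await cache.match(event.request);
        return cachedResponse || fetch(event.request);
      })()
    );
  }
});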
I'm building a PWA with limited offline capability. I'm using this code to save page content to a dynamic cache every time the user visits a new URL:
self.addEventListener('fetch', function(event) {
  event.respondWith(
    fetch(event.request)
      .then(function(res) {
        return caches.open('cache')
          .then(function(cache) {
            cache.put(event.request.url, res.clone());
            return res;
          });
      })
      .catch(function(err) {
        console.log(err);
        return caches.match(event.request);
      })
  );
});
This works great: after a page is loaded, all of its assets are cached and can be seen in offline mode.
But I would also like to add the option to automatically cache some of the more important URLs when the user comes back online.
I do that by putting the list of URLs in an array, looping through it, and sending a fetch request to each URL, so those pages can be cached without the user visiting/revisiting them.
The problem is that when I do that, some of the assets on some pages are not cached, for example a Google map on one page. Is there a way to simulate a real visit to a page, i.e. to get all of the assets of a URL with a fetch request?
Fetch code:
function fillDynamicCache(user_id = false) {
  let urls = [
    '/homepage',
    '/someotherpage',
    '/thirdpage',
    '/...',
  ];

  urls.map((url, id) => (
    fetch(url)
      .then(function(response) {
        if (response.status !== 200) {
          console.log('Looks like there was a problem. Status Code: ' + response.status);
          return;
        }
        console.log('in fetch: ' + url);
      })
      .catch(function(err) {
        console.log('Fetch Error :-S', err);
      })
  ));
}
self.addEventListener('message', (event) => {
  // refresh cache when user comes back online
  if (event.data == 'is_online') {
    fillDynamicCache();
  } else if (event.data == 'is_updated') {
    self.skipWaiting();
  }
});
Typically, if you have important assets you want to provide to users even when they are offline, you should consider an offline-first strategy, meaning you prefetch those resources while the service worker is installing.
This way the matching requests will be served from the cache, improving performance because you skip the corresponding network calls entirely.
In case the target resources tend to update/change frequently on the server, you can opt for a stale-while-revalidate strategy (after the data is served from the cache, the SW updates its value with a newer one from the network, if available), or even network-first with a cache fallback; the latter if you always want to provide the latest values and serve cached data only when the network connection times out or is unavailable.
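For illustration, a stale-while-revalidate handler can be sketched in a few lines; the cache name here is a placeholder and error handling is omitted:

self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.open('dynamic-cache').then(function(cache) {
      return cache.match(event.request).then(function(cached) {
        // Always start a network request and refresh the cache on success.
        var networked = fetch(event.request).then(function(res) {
          cache.put(event.request, res.clone());
          return res;
        });
        // Respond from the cache immediately when possible;
        // otherwise wait for the network.
        return cached || networked;
      });
    })
  );
});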
I wrote an article about service worker and caching strategies, in case you want to go deeper into the topic.
I have a React app created with create-react-app. By default, this tool creates a serviceWorker.js file for us, and I am using it to register a service worker. Furthermore, the docs suggest using Google's Workbox wizard to create a service-worker.js that manages my website for offline purposes. The goal is to store an offline.html page in the browser's cache and, whenever there is no connection, render the cached offline.html page.
I have successfully stored offline.html in the cache, and it appears among the precached URLs.
I can also manually navigate to offline.html if I change the URL in my browser.
However, I am having trouble automatically grabbing this file and rendering it whenever there isn't a connection.
In the serviceWorker.js code that CRA generates for me, there's a function called checkValidServiceWorker:
function checkValidServiceWorker(swUrl, config) {
  // Check if the service worker can be found. If it can't, reload the page.
  fetch(swUrl)
    .then(response => {
      // Ensure service worker exists, and that we really are getting a JS file.
      const contentType = response.headers.get('content-type');
      if (
        response.status === 404 ||
        (contentType != null && contentType.indexOf('javascript') === -1)
      ) {
        // No service worker found. Probably a different app. Reload the page.
        navigator.serviceWorker.ready.then(registration => {
          registration.unregister().then(() => {
            window.location.reload();
          });
        });
      } else {
        // Service worker found. Proceed as normal.
        registerValidSW(swUrl, config);
      }
    })
    .catch(() => {
      console.log(
        'No internet connection found. App is running in offline mode.'
      );
      const OFFLINE_URL = '/.offline/offline.html';
      return caches.match(OFFLINE_URL).then((response) => {
        console.log(response);
      });
    });
}
So in the catch part of the function, I want to do my redirect, because that's the logic that runs when we are offline. I read a lot of docs, and my current solution doesn't work. Any ideas on how to redirect in my serviceWorker?
I'm having some problems setting up a service worker for my website.
I only want to cache CSS/JS/fonts and some images/SVGs; I don't want to cache the HTML, since all of it is updated every minute.
It kinda works, but on my smartphone I keep getting the "Add to homescreen" prompt even when I've already added it, and in the Chrome Dev app I don't get the Add button.
Also with the Lighthouse I get the following errors:
"Does not respond with a 200 when offline"
"User will not be prompted to Install the Web App, Failures: Manifest start_url is not cached by a Service Worker."
Right now my sw.js looks like this. As you can see, I commented out the fetch part because it was caching the HTML and the cookies also weren't working.
Is there a simple Service Worker "template" around that I could use?
const PRECACHE = 'app-name';
const RUNTIME = 'runtime';

// A list of local resources we always want to be cached.
const PRECACHE_URLS = [
  '/css/file.css',
  '/js/file.js',
  '/images/logo.png',
  '/fonts/roboto/Roboto-Regular.woff2'
]

// The install handler takes care of precaching the resources we always need.
self.addEventListener('install', event => {
  event.waitUntil(
    caches.open(PRECACHE)
      .then(cache => cache.addAll(PRECACHE_URLS))
      .then(self.skipWaiting())
  );
});

// The activate handler takes care of cleaning up old caches.
self.addEventListener('activate', event => {
  const currentCaches = [PRECACHE, RUNTIME];
  event.waitUntil(
    caches.keys().then(cacheNames => {
      return cacheNames.filter(cacheName => !currentCaches.includes(cacheName));
    }).then(cachesToDelete => {
      return Promise.all(cachesToDelete.map(cacheToDelete => {
        return caches.delete(cacheToDelete);
      }));
    }).then(() => self.clients.claim())
  );
});

// The fetch handler serves responses for same-origin resources from a cache.
// If no response is found, it populates the runtime cache with the response
// from the network before returning it to the page.
self.addEventListener('fetch', event => {
  // Skip cross-origin requests, like those for Google Analytics.
  // if (event.request.url.startsWith(self.location.origin)) {
  //   event.respondWith(
  //     caches.match(event.request).then(cachedResponse => {
  //       if (cachedResponse) {
  //         return cachedResponse;
  //       }
  //       return caches.open(RUNTIME).then(cache => {
  //         return fetch(event.request).then(response => {
  //           // Put a copy of the response in the runtime cache.
  //           return cache.put(event.request, response.clone()).then(() => {
  //             return response;
  //           });
  //         });
  //       });
  //     })
  //   );
  // }
});
I'm not sure why the install banner appears, but the two errors given by Lighthouse are related to the missing caching of the very start_url, probably index.html. So Lighthouse will always tell you about those if you follow the caching strategy you described here.
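For those audits to pass, the start_url would have to be precached and actually served by a fetch handler while offline. A minimal sketch of the first half, assuming the start_url is '/', is a one-line addition to your existing list:

const PRECACHE_URLS = [
  '/',  // the start_url, so the app can answer with a 200 while offline
  '/css/file.css',
  '/js/file.js',
  '/images/logo.png',
  '/fonts/roboto/Roboto-Regular.woff2'
]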
I suggest you try Workbox and its runtime caching. Runtime caching, in a nutshell, works like so: you specify URLs like *.svg, *.css, etc., and the SW caches them when the client first asks for them. In the future, when the files are already cached, the SW serves them from the cache to the client. Basically, you tell the SW to cache this and that kind of URL when it encounters them, not in advance.
Runtime caching can very well be accompanied by precaching (which Workbox also offers!) to cache a bunch of files up front.
Check it out here: https://workboxjs.org
They have a couple of examples and plugins for build tooling.
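For illustration, a minimal sketch of Workbox runtime caching, loading the library from the official CDN bundle (the version, cache name, and matched destinations here are placeholders, not recommendations):

importScripts('https://storage.googleapis.com/workbox-cdn/releases/6.5.4/workbox-sw.js');

// Cache CSS, JS, fonts and images the first time the client requests them,
// then keep serving them from the cache while revalidating in the background.
workbox.routing.registerRoute(
  ({ request }) => ['style', 'script', 'font', 'image'].includes(request.destination),
  new workbox.strategies.StaleWhileRevalidate({ cacheName: 'runtime-assets' })
);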