Service worker offline page won't load - javascript

This used to work for me but stopped a couple of months ago, and I've tinkered my way right out of being able to figure it out. What am I doing wrong here?
Registering the service worker, no problem:
if (navigator.serviceWorker) {
  window.addEventListener('load', () => {
    navigator.serviceWorker
      .register('/sw.js')
      .then(() => console.log('[ServiceWorker] Registered Successfully'))
      .catch(err => console.log(`[ServiceWorker] Error: ${err}`));
  });
} else {
  console.log('Service Worker not supported.');
}
Set up a cache version and preloaded the cache, no problem:
const cacheName = '2020.10.06-01';
var cacheFiles = ['/offline.html'];
Installed the service worker, no problem:
addEventListener('install', e => {
  e.waitUntil(
    caches.open(cacheName).then(cache => {
      return cache.addAll(cacheFiles);
    })
  );
});
Activated the service worker for automatic cache rollover, no problem:
addEventListener('activate', e => {
  e.waitUntil(
    caches.keys().then(keyList => {
      return Promise.all(keyList.map(key => {
        if (key !== cacheName) {
          return caches.delete(key);
        }
      }));
    })
  );
});
Fetching from cache or network, no problem:
addEventListener('fetch', e => {
  e.respondWith(async function() {
    try {
      const cache = await caches.open(cacheName);
      const cachedResponse = await cache.match(e.request);
      const networkResponsePromise = fetch(e.request);

      e.waitUntil(async function() {
        const networkResponse = await networkResponsePromise;
        await cache.put(e.request, networkResponse.clone());
      }());

      // Return the cached response if we have one, otherwise return the network response.
      return cachedResponse || networkResponsePromise;
    } catch (error) {
      console.log('Fetch failed; returning offline page instead.', error);
      const cache = await caches.open(cacheName);
      const cachedResponse = await cache.match('/offline.html');
      return cachedResponse;
    }
  }());
});
But if the page/resource I'm requesting is not already in the cache AND the network is not available, it refuses to display my 'offline.html' page (which I know IS in the cache).
Any ideas?

Here's the Fetch code I wrote in the end that works perfectly for me:
self.addEventListener('fetch', (event) => {
  event.respondWith((async () => {
    const cache = await caches.open(cacheName);
    try {
      const cachedResponse = await cache.match(event.request);
      if (cachedResponse) {
        console.log('cachedResponse: ', event.request.url);
        return cachedResponse;
      }
      const fetchResponse = await fetch(event.request);
      if (fetchResponse) {
        console.log('fetchResponse: ', event.request.url);
        await cache.put(event.request, fetchResponse.clone());
        return fetchResponse;
      }
    } catch (error) {
      console.log('Fetch failed: ', error);
      const cachedResponse = await cache.match('/en/offline.html');
      return cachedResponse;
    }
  })());
});
This does everything I need, in a very specific order. It checks the cache first; if a match is found, it's returned. It checks the network next; if the fetch succeeds, it caches the response first and then returns it. Otherwise it displays a custom offline page with a big Reload button to encourage visitors to try again when they are back online.
But the most important thing to realise is that doing it this way allows me to display a page and all its resources with or without network access.
UPDATE: In order to deal with changes to CORS security requirements that were implemented in all browsers between March and August of 2020, I had to make one small change to the 'fetch' event.
Changed from:
const fetchResponse = await fetch(event.request);
To:
const fetchResponse = await fetch(event.request, {mode:'no-cors'});

Replace your fetch event code with this one. For every request, your fetch handler will be invoked; it will check whether the request matches the cache file list and, if so, serve the file from there; otherwise it will make a fetch call to get the file from the server.
self.addEventListener("fetch", function (event) {
  event.respondWith(
    caches.match(event.request)
      .then(function (response) {
        if (response) {
          return response;
        }
        return fetch(event.request);
      })
  );
});
Also, you don't need a separate "offline.html" file in your cache file list. Instead, add your main application HTML file and the relevant CSS and JS files to that list. That will let your application work completely offline when there is no network.
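For example, the precache list might then look something like this (these paths are placeholders, not taken from the question):
var cacheFiles = [
  // Hypothetical app-shell list: the main page plus the CSS/JS it needs,
  // so the whole app keeps working with no network at all.
  '/',
  '/index.html',
  '/css/main.css',
  '/js/app.js'
];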

Related

Intercept fetch for the first time but not afterwards using serviceWorker

I need some guidance here with a service worker.
When the service worker is installed, it caches the assets. On the next reload, when any request is made, it is intercepted by the service worker, which first checks the cache; if the resource isn't found there, we make a network call. But this second network call is again being intercepted by the service worker, so it has turned into an infinite loop.
I don't want the next fetch call to be intercepted again. I hope I'm able to explain the issue here.
Here is the serviceWorker.js
const cacheVersion = "v11";

self.addEventListener('install', (event) => {
  self.skipWaiting();
  event.waitUntil(caches.open(cacheVersion).then((cache) => {
    cache.addAll([
      '/',
      '/index.html',
      '/style.css',
      '/images/github.png',
    ])
    .then(() => console.log('cached'), (err) => console.log(err));
  }));
});

self.addEventListener('activate', (event) => {
  event.waitUntil(
    (async () => {
      const keys = await caches.keys();
      return keys.map(async (cache) => {
        if (cache !== cacheVersion) {
          console.log("service worker: Removing old cache: " + cache);
          return await caches.delete(cache);
        }
      });
    })()
  );
});

const cacheFirst = async (request) => {
  try {
    const responseFromCache = await caches.match(request);
    if (responseFromCache) {
      return responseFromCache;
    }
  } catch (err) {
    return fetch(request);
  }
  return fetch(request);
};

self.addEventListener("fetch", (event) => {
  event.respondWith(cacheFirst(event.request));
});
The reason here is your cacheFirst; it's a bit wrong. What do we want to do inside it (the high-level algorithm)? It should be something like this, right?
check cache and if match found - return
otherwise, fetch from server, cache and return
otherwise, if network failed - return some "dummy" response
const cacheFirst = async (request) => {
  // First try to get the resource from the cache
  const responseFromCache = await caches.match(request);
  if (responseFromCache) {
    return responseFromCache;
  }

  // Next try to get the resource from the network
  try {
    const responseFromNetwork = await fetch(request);
    // response may be used only once
    // we need to save clone to put one copy in cache
    // and serve second one
    putInCache(request, responseFromNetwork.clone());
    return responseFromNetwork;
  } catch (error) {
    // well network failed, but we need to return something right ?
    return new Response('Network error happened', {
      status: 408,
      headers: { 'Content-Type': 'text/plain' },
    });
  }
};
This is not a ready-to-use solution! Think of it as pseudo-code; for instance, you might need to implement putInCache first.
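For completeness, a minimal putInCache could be sketched like this (a sketch assuming the same cacheVersion constant from the question):
const putInCache = async (request, response) => {
  // Sketch: open the versioned cache and store a copy of the response.
  // A failed cache write is logged but doesn't break the response path.
  try {
    const cache = await caches.open(cacheVersion);
    await cache.put(request, response);
  } catch (err) {
    console.log('putInCache failed: ', err);
  }
};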

I'm caching 900 items in my PWA. Is there a better way for offline capability?

I have a PWA with static content that must be 100% available offline. There are 400 HTML pages, 450 PNG content images, and the usual site assets. The total cache size is 21 MB and the content rarely changes, if ever. My service worker looks like this:
const all_assets = [...nnc, ...nc, ...checklist_content, ...info, ...fonts, ...other];
var cacheName = "ecl-cache-005";

self.addEventListener('install', function (event) {
  event.waitUntil((async () => {
    const cache = await caches.open(cacheName);
    await cache.addAll(all_assets);
  })());
});

self.addEventListener('activate', (e) => {
  e.waitUntil(caches.keys().then((keyList) => {
    return Promise.all(keyList.map((key) => {
      if (key === cacheName) { return; }
      return caches.delete(key);
    }));
  }));
});

self.addEventListener('fetch', function (event) {
  event.respondWith(
    // Try the cache
    caches.match(event.request).then(function (response) {
      // return it if there is a response, or else fetch again
      return response || fetch(event.request);
    })
  );
});
The app works as intended; however, the install event is very slow, I assume due to the enormous list of URLs requested in the service worker, the 19 MB of images, or both. Is there a better way? Can I cache the images in a separate event after the install?
I've looked into Firestore and IndexedDB options, but they don't seem to be well suited to image content.
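One possible approach, purely as a sketch: precache only the small critical shell during install so the worker activates quickly, then warm the image cache in the background once the page is running, for example from a message the page sends after load. Here shell_assets and image_assets are hypothetical splits of the all_assets list above:
// In the service worker: keep install fast by precaching only the shell...
self.addEventListener('install', (event) => {
  event.waitUntil((async () => {
    const cache = await caches.open(cacheName);
    await cache.addAll(shell_assets); // small, critical set only
  })());
});

// ...and fill in the images when the page asks for it, without blocking install or activate.
self.addEventListener('message', (event) => {
  if (event.data && event.data.type === 'cache-images') {
    event.waitUntil((async () => {
      const cache = await caches.open(cacheName);
      for (const url of image_assets) {
        if (!(await cache.match(url))) {
          try { await cache.add(url); } catch (err) { /* skip failures */ }
        }
      }
    })());
  }
});

// In the page script, after the service worker is ready:
// navigator.serviceWorker.ready.then((reg) => reg.active.postMessage({ type: 'cache-images' }));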

Service worker returns offline html page for javascript files

I'm new to service workers and offline capabilities. I created a simple service worker to handle network requests and return an offline HTML page when offline. This was created following Google's PWA guide.
The problem is that the service worker returns offline.html when JavaScript files (which are not cached) are requested. It should instead return a network error or something. Here is the code:
const cacheName = 'offline-v1900'; // increment version to update cache

// cache these files needed for offline use
const appShellFiles = [
  './offline.html',
  './css/bootstrap.min.css',
  './img/logo/logo.png',
  './js/jquery-3.5.1.min.js',
  './js/bootstrap.min.js',
];

self.addEventListener("fetch", (e) => {
  // We only want to call e.respondWith() if this is a navigation request
  // for an HTML page.
  // console.log(e.request.url);
  e.respondWith(
    (async () => {
      try {
        // First, try to use the navigation preload response if it's supported.
        const preloadResponse = await e.preloadResponse;
        if (preloadResponse) {
          // console.log('returning preload response');
          return preloadResponse;
        }

        const cachedResponse = await caches.match(e.request);
        if (cachedResponse) {
          // console.log(`[Service Worker] Fetching cached resource: ${e.request.url}`);
          return cachedResponse;
        }

        // Always try the network first.
        const networkResponse = await fetch(e.request);
        return networkResponse;
      } catch (error) {
        // catch is only triggered if an exception is thrown, which is likely
        // due to a network error.
        // If fetch() returns a valid HTTP response with a response code in
        // the 4xx or 5xx range, the catch() will NOT be called.
        // console.log("Fetch failed; returning offline page instead.", error);
        const cachedResponse = await caches.match('offline.html');
        return cachedResponse;
      }
    })()
  );
});
When offline, I open a URL on my site and it loads the page from the cache, but not all assets are cached for offline use. So when a network request is made for, say, https://www.gstatic.com/firebasejs/9.1.3/firebase-app.js, the response I get is the HTML of the offline.html page. This breaks the page because of JavaScript errors.
It should instead return a network error or something.
I think the relevant sample code is from https://googlechrome.github.io/samples/service-worker/custom-offline-page/
self.addEventListener('fetch', (event) => {
  // We only want to call event.respondWith() if this is a navigation request
  // for an HTML page.
  if (event.request.mode === 'navigate') {
    event.respondWith((async () => {
      try {
        // First, try to use the navigation preload response if it's supported.
        const preloadResponse = await event.preloadResponse;
        if (preloadResponse) {
          return preloadResponse;
        }

        const networkResponse = await fetch(event.request);
        return networkResponse;
      } catch (error) {
        // catch is only triggered if an exception is thrown, which is likely
        // due to a network error.
        // If fetch() returns a valid HTTP response with a response code in
        // the 4xx or 5xx range, the catch() will NOT be called.
        console.log('Fetch failed; returning offline page instead.', error);

        const cache = await caches.open(CACHE_NAME);
        const cachedResponse = await cache.match(OFFLINE_URL);
        return cachedResponse;
      }
    })());
  }

  // If our if() condition is false, then this fetch handler won't intercept the
  // request. If there are any other fetch handlers registered, they will get a
  // chance to call event.respondWith(). If no fetch handlers call
  // event.respondWith(), the request will be handled by the browser as if there
  // were no service worker involvement.
});
Specifically, that fetch handler checks to see whether event.request.mode === 'navigate' and only returns HTML when offline if that's the case. That's what's required to make sure that you don't end up returning offline HTML for other types of resources.
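Applied to the handler in the question, a minimal sketch of the same idea (not the asker's exact code) would keep the cache lookup for every request but reserve the offline.html fallback for navigations only:
self.addEventListener('fetch', (e) => {
  e.respondWith((async () => {
    try {
      // Serve from the cache when possible, otherwise go to the network.
      const cachedResponse = await caches.match(e.request);
      if (cachedResponse) {
        return cachedResponse;
      }
      return await fetch(e.request);
    } catch (error) {
      // Only page navigations get the offline page; scripts, styles and
      // images get a plain network error instead of HTML.
      if (e.request.mode === 'navigate') {
        return await caches.match('offline.html');
      }
      return Response.error();
    }
  })());
});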

Service Worker (sw.js) should always return offline.html document if there is no network connection

I'm having an issue with a service worker that's only partially working. The manifest defines the start_url correctly (https://example.com/start.html) for users who add the website to their home screen, and both start.html and offline.html are cached correctly as well; both are available while the browser has no internet connection.
If the user goes offline (no network connection), the service worker successfully serves both https://example.com/start.html and https://example.com/offline.html -- but if the user tries opening anything else (e.g. https://example.com/something.html) the browser throws a "site can't be reached" error message.
What I actually need is that, if there is no network connection, the service worker always returns the cached offline.html document, no matter which URL the user is trying to reach.
In other words, the problem is that the service worker is not properly serving offline.html for the user's requests when there's no network connection (whatever solution is found, it also needs to keep start.html cached for the manifest's start_url).
This is my current code:
manifest.json
{
  "name": "My Basic Example",
  "short_name": "Example",
  "icons": [
    {
      "src": "https://example.com/static/ico/manifest-192x192.png",
      "sizes": "192x192",
      "type": "image/png"
    },
    {
      "src": "https://example.com/static/ico/manifest-512x512.png",
      "sizes": "512x512",
      "type": "image/png",
      "purpose": "any maskable"
    }
  ],
  "start_url": "https://example.com/start.html",
  "scope": "/",
  "display": "standalone",
  "orientation": "portrait",
  "background_color": "#2196f3",
  "theme_color": "#2196f3"
}
core.js
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('sw.js', {
    scope: '/'
  }).then(function(registration) {
  }).catch(function(err) {
  });
  navigator.serviceWorker.ready.then(function(registration) {
  });
}
sw.js
const PRECACHE = 'cache-v1';
const RUNTIME = 'runtime';
const PRECACHE_URLS = [
  '/offline.html',
  '/start.html'
];

self.addEventListener('install', event => {
  event.waitUntil(
    caches.open(PRECACHE)
      .then(cache => cache.addAll(PRECACHE_URLS))
      .then(self.skipWaiting())
  );
});

self.addEventListener('activate', event => {
  const currentCaches = [PRECACHE, RUNTIME];
  event.waitUntil(
    caches.keys().then(cacheNames => {
      return cacheNames.filter(cacheName => !currentCaches.includes(cacheName));
    })
    .then(cachesToDelete => {
      return Promise.all(cachesToDelete.map(cacheToDelete => {
        return caches.delete(cacheToDelete);
      }));
    })
    .then(() => self.clients.claim())
  );
});

self.addEventListener('fetch', event => {
  if (event.request.url.startsWith(self.location.origin)) {
    event.respondWith(
      caches.match(event.request).then(cachedResponse => {
        if (cachedResponse) {
          return cachedResponse;
        }
        return caches.open(RUNTIME).then(cache => {
          return fetch(event.request).then(response => {
            return cache.put(event.request, response.clone()).then(() => {
              return response;
            });
          });
        });
      })
    );
  }
});
Any ideas? Thanks!
Most of your code worked as expected, but you needed a check to see if the user was requesting start.html. I took the code from Create an offline fallback page and modified it to suit your request.
// Incrementing OFFLINE_VERSION will kick off the install event and force
// previously cached resources to be updated from the network.
const OFFLINE_VERSION = 1;
const CACHE_NAME = "offline";
// Customize this with a different URL if needed.
const START_URL = "start.html";
const OFFLINE_URL = "offline.html";

self.addEventListener("install", (event) => {
  event.waitUntil(
    (async () => {
      const cache = await caches.open(CACHE_NAME);
      // Setting {cache: 'reload'} in the new request will ensure that the
      // response isn't fulfilled from the HTTP cache; i.e., it will be from
      // the network.
      await Promise.all([
        cache.add(new Request(OFFLINE_URL, { cache: "reload" })),
        cache.add(new Request(START_URL, { cache: "reload" })),
      ]);
    })()
  );
  // Force the waiting service worker to become the active service worker.
  self.skipWaiting();
});

self.addEventListener("activate", (event) => {
  event.waitUntil(
    (async () => {
      // Enable navigation preload if it's supported.
      // See https://developers.google.com/web/updates/2017/02/navigation-preload
      if ("navigationPreload" in self.registration) {
        await self.registration.navigationPreload.enable();
      }
    })()
  );
  // Tell the active service worker to take control of the page immediately.
  self.clients.claim();
});

self.addEventListener("fetch", (event) => {
  // We only want to call event.respondWith() if this is a navigation request
  // for an HTML page.
  if (event.request.mode === "navigate") {
    event.respondWith(
      (async () => {
        try {
          // First, try to use the navigation preload response if it's supported.
          const preloadResponse = await event.preloadResponse;
          if (preloadResponse) {
            return preloadResponse;
          }

          // Always try the network first.
          const networkResponse = await fetch(event.request);
          return networkResponse;
        } catch (error) {
          // catch is only triggered if an exception is thrown, which is likely
          // due to a network error.
          // If fetch() returns a valid HTTP response with a response code in
          // the 4xx or 5xx range, the catch() will NOT be called.
          console.log("Fetch failed; returning cached page instead.", error);

          const cache = await caches.open(CACHE_NAME);
          if (event.request.url.includes(START_URL)) {
            return await cache.match(START_URL);
          }
          return await cache.match(OFFLINE_URL);
        }
      })()
    );
  }

  // If our if() condition is false, then this fetch handler won't intercept the
  // request. If there are any other fetch handlers registered, they will get a
  // chance to call event.respondWith(). If no fetch handlers call
  // event.respondWith(), the request will be handled by the browser as if there
  // were no service worker involvement.
});
One thing to note with this: once start.html has been cached when the service worker is first installed, it will not be updated again until the service worker itself is updated. That means your users may see an old/outdated start.html any time they load your app while offline. You probably want to use a network-first strategy for start.html.
You can try the working demo and source
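A network-first handling of start.html could be sketched roughly like this (assuming the same CACHE_NAME and START_URL constants as above); inside the fetch handler you would call it when event.request.url.includes(START_URL):
// Rough network-first sketch for start.html: try the network and refresh the
// cached copy on success; fall back to the cached copy only when the fetch fails.
async function networkFirstStart(request) {
  const cache = await caches.open(CACHE_NAME);
  try {
    const networkResponse = await fetch(request);
    await cache.put(START_URL, networkResponse.clone());
    return networkResponse;
  } catch (error) {
    return await cache.match(START_URL);
  }
}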

Service worker update not working?

I have an nginx-hosted Jekyll site.
I have made several changes to my site, updated the package version, and changed my JavaScript to update the service worker. However, my changes are still not reflected in Chrome.
main.js
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/sw.js', {scope: 'sw-test'}).then(function(registration) {
    // registration worked
    console.log('Registration succeeded.');
    registration.update();
  }).catch(function(error) {
    // registration failed
    console.log('Registration failed with ' + error);
  });
}
sw.js
var PRECACHE = 'precache-{{site.version}}';
var RUNTIME = 'runtime';

// A list of local resources we always want to be cached.
var PRECACHE_URLS = [
  './',
  '/index.html',
  '/assets/css/main.css',
  '/assets/js/vendor/modernizr-custom.js',
  '/assets/js/bundle.js'
];

// The install handler takes care of precaching the resources we always need.
self.addEventListener('install', event => {
  event.waitUntil(
    caches.open(PRECACHE)
      .then(cache => cache.addAll(PRECACHE_URLS))
      .then(self.skipWaiting())
  );
});

// The activate handler takes care of cleaning up old caches.
self.addEventListener('activate', event => {
  var currentCaches = [PRECACHE, RUNTIME];
  event.waitUntil(
    caches.keys().then(cacheNames => {
      return cacheNames.filter(cacheName => !currentCaches.includes(cacheName));
    }).then(cachesToDelete => {
      return Promise.all(cachesToDelete.map(cacheToDelete => {
        return caches.delete(cacheToDelete);
      }));
    }).then(() => self.clients.claim())
  );
});

// The fetch handler serves responses for same-origin resources from a cache.
// If no response is found, it populates the runtime cache with the response
// from the network before returning it to the page.
self.addEventListener('fetch', event => {
  // Skip cross-origin requests, like those for Google Analytics.
  if (event.request.url.startsWith(self.location.origin)) {
    event.respondWith(
      caches.match(event.request).then(cachedResponse => {
        if (cachedResponse) {
          return cachedResponse;
        }
        return caches.open(RUNTIME).then(cache => {
          return fetch(event.request).then(response => {
            // Put a copy of the response in the runtime cache.
            return cache.put(event.request, response.clone()).then(() => {
              return response;
            });
          });
        });
      })
    );
  }
});
Chrome Screenshot
Any help greatly appreciated!!
Your nginx config is most likely setting caching headers on the service worker script. When that's the case, the browser returns the old version from its local HTTP cache, skipping the network completely.
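If that turns out to be the case, one way to rule it out is to tell nginx not to let the browser cache the worker script; an illustrative snippet (adjust the path to wherever your sw.js is served from):
# Serve the service worker script with no-cache so the browser always
# revalidates it and picks up new versions promptly.
location = /sw.js {
    add_header Cache-Control "no-cache, no-store, must-revalidate";
}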
