How can I use the code below with WorkboxSW to register routes for all of my pre-cached URLs? These pre-cached URLs include AJAX requests that also go to the server.
$.ajax({
  url: '/MyAPI/Records',
  type: 'GET',
  dataType: 'json',
  success: function(data) {
    alert('Records: ' + data);
    // Build the array of URLs for all record details.
    var urls = [];
    urls.push('/MyAPI/Records');
    $(data).each(function(index) {
      urls.push('/MyAPI/Records/' + data[index].id + '/data');
    });
    // Add all URLs to the cache.
    var requests = urls.map(url => new Request(url));
    caches.open('my-cache').then(cache => {
      return cache.addAll(requests).then(() => {
        // At this point, `cache` will be populated with `Response` objects,
        // and `requests` contains the `Request` objects that were used.
      }).catch(error => console.error(`Oops! ${error}`));
    });
  },
  error: function(request, error) {
    alert('Request: ' + JSON.stringify(request));
  }
});
Workbox's precaching relies on having access to a local file representing the resource at build time. This allows it to generate a hash of each resource it's managing (based on the local file's contents) and then keep that cached resource up to date whenever the local file changes.
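For illustration, here is roughly what build-time precaching looks like with the same WorkboxSW API used below, assuming you've already loaded workbox-sw via importScripts(); the file names and revision hashes are placeholders that your build tooling would normally generate:

const workboxSW = new WorkboxSW();

// Each entry pairs a URL with a revision hash derived from the local file's
// contents, so the cached copy is refreshed whenever the file changes.
// (The hashes here are placeholders, not real build output.)
workboxSW.precache([
  {url: '/index.html', revision: 'abc123'},
  {url: '/app.js', revision: 'def456'},
]);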
What you're suggesting sounds more like Workbox's support for handling certain routes via runtime caching. You can configure it with something like:
// Replace with your actual API endpoint.
const API_URL = 'https://my-api-server.com/api/restEndpoint';

// Use whichever strategy you'd prefer: networkFirst, staleWhileRevalidate, etc.
const apiCallHandler = workboxSW.strategies.networkFirst({
  cacheName: 'my-api'
});

workboxSW.registerRoute(
  API_URL,
  apiCallHandler
);
This will result in responses from https://my-api-server.com being added to the cache named my-api at runtime, after you make your first request. (In this particular case, using the networkFirst strategy, those cached responses will only be used if the network is unavailable.)
If you're not okay with the runtime cache starting out "cold" and you feel like it needs to be primed, then you could do that by writing your own install event handler alongside your Workbox code:
// Your existing WorkboxSW code goes here.

self.addEventListener('install', event => {
  event.waitUntil(
    caches.open('my-api')
      .then(cache => cache.add(API_URL))
  );
});
I transformed my React application into a PWA and it is partially working.
I followed this tutorial: https://medium.com/#toricpope/transform-a-react-app-into-a-progressive-web-app-pwa-dea336bd96e6
However, this article only shows how to cache static data, and I also need to store data coming from the server. I could do this by following the first answer of this post: How can I cache data from API to Cache Storage in React PWA? and inserting the Firebase addresses where the data is stored into the urlsToCache array, which lists the files that should be stored in the cache.
So far so good; however, after the data is stored in the cache, the application stops fetching data from the server and loads the page only with data from the cache, even when the server is updated. This is what I need to fix.
In short, I need to fetch the data from the server, store it in the cache so it can be used when the application is offline, and update the cache every time the server is reached.
I am trying to follow this guide, but without success: https://developers.google.com/web/fundamentals/instant-and-offline/offline-cookbook/#serving-suggestions
This is my worker.js file:
var CACHE_NAME = 'pwa-task-manager';
var urlsToCache = [
  '/',
  '/completed',
  '/index.html',
  '/static/js/main.chunk.js',
  '/static/js/0.chunk.js',
  '/static/js/1.chunk.js',
  '/static/js/bundle.js',
  '/calculadora',
  'https://calc-marmo.firebaseio.com/clientes.json',
  'https://calc-marmo.firebaseio.com/adm.json',
];
// Install a service worker
self.addEventListener('install', event => {
  // Perform install steps
  event.waitUntil(
    caches.open(CACHE_NAME)
      .then(function(cache) {
        console.log('Opened cache');
        return cache.addAll(urlsToCache);
      })
  );
});
// Cache and return requests
self.addEventListener('fetch', event => {
  event.respondWith(
    caches.match(event.request)
      .then(function(response) {
        // Cache hit - return the cached response
        if (response) {
          return response;
        }
        return fetch(event.request);
      })
  );
});
// Update a service worker
self.addEventListener('activate', event => {
  var cacheWhitelist = ['pwa-task-manager'];
  event.waitUntil(
    caches.keys().then(cacheNames => {
      return Promise.all(
        cacheNames.map(cacheName => {
          if (cacheWhitelist.indexOf(cacheName) === -1) {
            return caches.delete(cacheName);
          }
        })
      );
    })
  );
});
Any help would be much appreciated.
This sounds like you need a Network First strategy, which is not mentioned in the cookbook. This strategy is similar to Network falling back to cache, but it additionally always stores the response in the cache.
Explanation: https://developers.google.com/web/tools/workbox/modules/workbox-strategies#network_first_network_falling_back_to_cache
Code sample (if you don't use workbox): https://gist.github.com/JMPerez/8ca8d5ffcc0cc45a8b4e1c279efd8a94
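If you're not using Workbox, a minimal sketch of such a Network First fetch handler could replace the fetch listener in your worker.js (CACHE_NAME is assumed to be the constant you already define there):

self.addEventListener('fetch', event => {
  // Only handle GET requests; other methods can't be stored in the cache.
  if (event.request.method !== 'GET') {
    return;
  }
  event.respondWith(
    fetch(event.request)
      .then(networkResponse => {
        // Always refresh the cache with the latest response from the server.
        const copy = networkResponse.clone();
        caches.open(CACHE_NAME).then(cache => cache.put(event.request, copy));
        return networkResponse;
      })
      // If the network is unavailable, fall back to the cached copy.
      .catch(() => caches.match(event.request))
  );
});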
I can register my Progressive Web App as a share target for images (supported from Chrome Android 76):
"share_target": {
"action": "/share-target",
"method": "POST",
"enctype": "multipart/form-data",
"params": {
"files": [
{
"name": "myImage",
"accept": ["image/jpeg", "image/png"]
}
]
}
}
I can then intercept attempts to share images to the app in a service worker:
self.addEventListener('fetch', e => {
  if (e.request.method === 'POST' && e.request.url.endsWith('/share-target')) {
    // todo
  }
});
How would I display the shared image in my offline PWA?
There are a few different steps to take here.
I put together a working example at https://web-share-offline.glitch.me/, with the source at https://glitch.com/edit/#!/web-share-offline
Ensure your web app works offline
This is a prerequisite, and I accomplished it by generating a service worker that would precache my HTML, JS, and CSS using Workbox.
The JS that runs when the home page is loaded uses the Cache Storage API to read the list of image URLs that have been cached, and creates an <img> element on the page for each one.
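A rough sketch of that page-side code might look like the following (the function name is mine, not from the Glitch source; the 'images' cache name matches the handler shown further down):

async function showCachedImages() {
  const cache = await caches.open('images');
  // Each cached request's URL (e.g. /images/1234567890) doubles as the <img> src,
  // since a CacheOnly route serves those URLs from the same cache.
  const requests = await cache.keys();
  for (const request of requests) {
    const img = document.createElement('img');
    img.src = request.url;
    document.body.appendChild(img);
  }
}

showCachedImages();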
Create a share target handler that will cache images
I also used Workbox for this, but it's a bit more involved. The salient points are:
Make sure that you intercept POST requests for the configured share target URL.
It's up to you to read in the body of the shared images and to write them to your local cache using the Cache Storage API.
After you save the shared image to the cache, it's a good idea to respond to the POST with an HTTP 303 redirect response, so that the browser will display the actual home page for your web app.
Here's the Workbox configuration code that I used to handle this:
const shareTargetHandler = async ({event}) => {
  const formData = await event.request.formData();
  const cache = await caches.open('images');
  await cache.put(
    // TODO: Come up with a more meaningful cache key.
    `/images/${Date.now()}`,
    // TODO: Get more meaningful metadata and use it
    // to construct the response.
    new Response(formData.get('image'))
  );

  // After the POST succeeds, redirect to the main page.
  return Response.redirect('/', 303);
};

module.exports = {
  // ... other Workbox config ...
  runtimeCaching: [{
    // Create a 'fake' route to handle the incoming POST.
    urlPattern: '/share-target',
    method: 'POST',
    handler: shareTargetHandler,
  }, {
    // Create a route to serve the cached images.
    urlPattern: new RegExp('/images/\\d+'),
    handler: 'CacheOnly',
    options: {
      cacheName: 'images',
    },
  }],
};
I have this one-page app with a dynamic URL built with a token, like example.com/XV252GTH, and various assets, like CSS, a favicon and such.
Here is how I register the Service Worker:
navigator.serviceWorker.register('sw.js');
And in said sw.js, I pre-cache the assets while installing:
var cacheName = 'v1';
var cacheAssets = [
  'index.html',
  'app.js',
  'style.css',
  'favicon.ico'
];

function precache() {
  return caches.open(cacheName).then(function (cache) {
    return cache.addAll(cacheAssets);
  });
}

self.addEventListener('install', function(event) {
  event.waitUntil(precache());
});
Note that the index.html page (the one that registers the Service Worker) is just a template that gets populated on the server before being sent to the client; so in this pre-caching phase, I'm only caching the template, not the page.
Now, in the fetch event, any requested resource that is not in the cache gets copied to it:
addEventListener('fetch', event => {
  event.respondWith(async function() {
    const cachedResponse = await caches.match(event.request);
    if (cachedResponse) return cachedResponse;
    return fetch(event.request).then(updateCache(event.request));
  }());
});
Using this update function:
function updateCache(request) {
  return caches.open(cacheName).then(cache => {
    return fetch(request).then(response => {
      const resClone = response.clone();
      if (response.status < 400)
        return cache.put(request, resClone);
      return response;
    });
  });
}
At this stage, all the assets are in the cache, but not the dynamically generated page. Only after a reload can I see another entry in the cache: /XV252GTH. Now the app is offline-ready, but this reloading of the page kind of defeats the whole purpose of the Service Worker.
Question: How can I send the request (/XV252GTH) from the client (the page that registers the worker) to the SW? I guess I can set up a listener in sw.js:
self.addEventListener('message', function(event) {
  updateCache(event.request);
});
But how can I be sure that it will be honored in time, i.e. sent by the client after the SW has finished installing? What is a good practice in this case?
OK, I got the answer from this page: to cache the very page that registers the worker at activation time, just list all the SW's clients and get their URLs (href attribute).
self.addEventListener('activate', event => {
  event.waitUntil(
    self.clients.matchAll({includeUncontrolled: true}).then(clients => {
      // Cache the URL of every page that is already open (e.g. /XV252GTH).
      return Promise.all(
        clients.map(client => updateCache(new URL(client.url).href))
      );
    })
  );
});
Correct me if I understood you wrong!
You precache your files right here:
var cacheAssets = [
  'index.html',
  'app.js',
  'style.css',
  'favicon.ico'
];

function precache() {
  return caches.open(cacheName).then(function (cache) {
    return cache.addAll(cacheAssets);
  });
}
It should be clear that you are caching the template, since you cache it before the site gets built. This approach is not wrong, at least not for all types of files.
Your favicon.ico, for example, is a file that you would probably consider static. It rarely changes, if at all, and it isn't dynamic like your index.html.
Source
It should also be clear why you have the correct version after reloading the page since you have an update function.
The solution to this problem is the answer to your question:
How can I send the request (/XV252GTH) from the client (the page that registers the worker) to the SW?
Instead of caching it before the service worker is installed, you want to cache it after the back end has built your web page. So here is how it works:
You have an empty cache or at least a cache without your index.html.
Normally a request would be sent to the server to get the index.html. Instead, we first check the cache to see whether the index.html is already there (it won't be, the first time you load the page).
Since there is no match in the cache, we make a request to the server to fetch it. This is the same request the page would make if it loaded normally, so the server builds your index.html and sends it back to the page.
After receiving the index.html, load it into the page and store it in the cache.
An example method would be Stale-while-revalidate:
If there's a cached version available, use it, but fetch an update for next time.
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.open('mysite-dynamic').then(function(cache) {
      return cache.match(event.request).then(function(response) {
        var fetchPromise = fetch(event.request).then(function(networkResponse) {
          cache.put(event.request, networkResponse.clone());
          return networkResponse;
        });
        return response || fetchPromise;
      });
    })
  );
});
Source
Those are the basics for your problem. Now you have a wide variety of options to choose from that use the same method but add some extra features. Which one you choose is up to you, and without knowing your project in detail no one can tell you which to pick. You are also not limited to one option; in some cases you might combine two or more.
Google wrote a great guide about all the options you have and provided code examples for everything. They also explain your current approach. Not every option will be interesting and relevant for you, but I recommend reading them all, and reading them thoroughly.
This is the way to go.
I'm having some problems on setting up a service worker for my website.
I only want to cache css/js/fonts and some images/svg, I don't want to cache the HTML since all of it is updated every minute.
It kind of works, but when trying it on my smartphone I keep getting the "Add to homescreen" notification even when I've already added it. And in the Chrome Dev app I don't get the Add button.
Also with the Lighthouse I get the following errors:
"Does not respond with a 200 when offline"
"User will not be prompted to Install the Web App, Failures: Manifest start_url is not cached by a Service Worker."
Right now my sw.js looks like this. As you can see, I commented out the fetch part because it was caching the HTML and the cookies also weren't working.
Is there a simple Service Worker "template" around that I could use?
const PRECACHE = 'app-name';
const RUNTIME = 'runtime';

// A list of local resources we always want to be cached.
const PRECACHE_URLS = [
  '/css/file.css',
  '/js/file.js',
  '/images/logo.png',
  '/fonts/roboto/Roboto-Regular.woff2'
];
// The install handler takes care of precaching the resources we always need.
self.addEventListener('install', event => {
  event.waitUntil(
    caches.open(PRECACHE)
      .then(cache => cache.addAll(PRECACHE_URLS))
      .then(self.skipWaiting())
  );
});
// The activate handler takes care of cleaning up old caches.
self.addEventListener('activate', event => {
  const currentCaches = [PRECACHE, RUNTIME];
  event.waitUntil(
    caches.keys().then(cacheNames => {
      return cacheNames.filter(cacheName => !currentCaches.includes(cacheName));
    }).then(cachesToDelete => {
      return Promise.all(cachesToDelete.map(cacheToDelete => {
        return caches.delete(cacheToDelete);
      }));
    }).then(() => self.clients.claim())
  );
});
// The fetch handler serves responses for same-origin resources from a cache.
// If no response is found, it populates the runtime cache with the response
// from the network before returning it to the page.
self.addEventListener('fetch', event => {
  // Skip cross-origin requests, like those for Google Analytics.
  // if (event.request.url.startsWith(self.location.origin)) {
  //   event.respondWith(
  //     caches.match(event.request).then(cachedResponse => {
  //       if (cachedResponse) {
  //         return cachedResponse;
  //       }
  //       return caches.open(RUNTIME).then(cache => {
  //         return fetch(event.request).then(response => {
  //           // Put a copy of the response in the runtime cache.
  //           return cache.put(event.request, response.clone()).then(() => {
  //             return response;
  //           });
  //         });
  //       });
  //     })
  //   );
  // }
});
I'm not sure why the install banner keeps appearing, but the two errors given by Lighthouse are related to the missing caching of the start_url itself, probably index.html. So Lighthouse will keep reporting those as long as you follow the caching strategy you described here.
I suggest you try Workbox and its runtime caching. Runtime caching, in a nutshell, works like this: you specify URL patterns like *.svg, *.css, etc., and the SW caches the matching responses the first time the client requests them. From then on, when the files are already cached, the SW serves them to the client from the cache. Basically, you tell the SW to cache this and that kind of URL when it encounters them, rather than in advance.
Runtime caching can very well be accompanied by precaching (which Workbox also offers) to cache a fixed set of files.
Check it out here: https://workboxjs.org
They have a couple of examples and plugins for build tooling.
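As a rough sketch, runtime caching for static assets could be configured along these lines (the URL pattern, handler name, and cache name are placeholders, and the exact option names vary between Workbox versions):

module.exports = {
  // ... other Workbox build config ...
  runtimeCaching: [{
    // Cache CSS, JS, fonts, and images the first time the client requests them.
    urlPattern: /\.(?:css|js|woff2|png|svg)$/,
    handler: 'CacheFirst',
    options: {
      cacheName: 'static-assets',
    },
  }],
};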
I'm currently considering adding service workers to a Web app I'm building.
This app is, essentially, a collection manager. You can CRUD items of various types and they are usually tightly linked together (e.g. A hasMany B hasMany C).
sw-toolbox offers a toolbox.fastest handler which goes to the cache and then to the network (in 99% of the cases, cache will be faster), updating the cache in the background. What I'm wondering is how you can be notified that there's a new version of the page available. My intent is to show the cached version and, then, if the network fetch got a newer version, to suggest to the user to refresh the page in order to see the latest edits. I saw something in a YouTube video a while ago but the presenter gives no clue of how to deal with this.
Is that possible? Is there some event handler or promise that I could bind to the request so that I know when the newer version is retrieved? I would then post a message to the page to show a notification.
If not, I know I can use toolbox.networkFirst along with a reasonable timeout to make the pages available even on Lie-Fi, but it's not as good.
I just stumbled across the Mozilla Service Worker Cookbook, which includes more or less what I wanted: https://serviceworke.rs/strategy-cache-update-and-refresh.html
Here are the relevant parts (not my code: copied here for convenience).
Fetch methods for the worker
// On fetch, use the cache but update the entry with the latest contents from the server.
self.addEventListener('fetch', function(evt) {
  console.log('The service worker is serving the asset.');
  // You can use respondWith() to answer ASAP…
  evt.respondWith(fromCache(evt.request));
  // ...and waitUntil() to prevent the worker from being killed until the cache is updated.
  evt.waitUntil(
    update(evt.request)
      // Finally, send a message to the client to inform it that the resource is up to date.
      .then(refresh)
  );
});
// Open the cache where the assets were stored and search for the requested
// resource. Notice that in case of no match, the promise still resolves,
// but with undefined as the value.
function fromCache(request) {
  return caches.open(CACHE).then(function (cache) {
    return cache.match(request);
  });
}
// Update consists of opening the cache, performing a network request and
// storing the new response data.
function update(request) {
  return caches.open(CACHE).then(function (cache) {
    return fetch(request).then(function (response) {
      return cache.put(request, response.clone()).then(function () {
        return response;
      });
    });
  });
}
// Sends a message to the clients.
function refresh(response) {
  return self.clients.matchAll().then(function (clients) {
    clients.forEach(function (client) {
      // Encode which resource has been updated. By including the ETag, the
      // client can check if the content has changed.
      var message = {
        type: 'refresh',
        url: response.url,
        // Notice that not all servers return the ETag header. If it is not
        // provided, you should use other cache headers or rely on your own
        // means to check if the content has changed.
        eTag: response.headers.get('ETag')
      };
      // Tell the client about the update.
      client.postMessage(JSON.stringify(message));
    });
  });
}
Handling of the "resource was updated" message
navigator.serviceWorker.onmessage = function (evt) {
  var message = JSON.parse(evt.data);
  var isRefresh = message.type === 'refresh';
  var isAsset = message.url.includes('asset');
  var lastETag = localStorage.currentETag;
  // The ETag header usually contains the hash of the resource, so it is a very
  // effective way to check for fresh content.
  var isNew = lastETag !== message.eTag;
  if (isRefresh && isAsset && isNew) {
    // Skip the first time (when there is no ETag yet).
    if (lastETag) {
      // Inform the user about the update.
      notice.hidden = false;
    }
    // For teaching purposes: although this information is in the offline cache
    // and could be retrieved from the service worker, keeping track of the
    // header in localStorage keeps the implementation simple.
    localStorage.currentETag = message.eTag;
  }
};
};