How to delete service worker cache on localhost - JavaScript

Please see the service worker code below:
var CACHE_NAME = 'my-site-cache-v18';
var urlsToCache = [
  '1.jpg',
  '2.png'
];

self.addEventListener('install', function(event) {
  // Perform install steps
  event.waitUntil(
    caches.open(CACHE_NAME)
      .then(function(cache) {
        console.log('Opened cache');
        return cache.addAll(urlsToCache);
      })
  );
});

self.addEventListener('activate', function(event) {
  event.waitUntil(
    caches.keys().then(function(cacheNames) {
      return Promise.all(
        cacheNames.filter(function(cacheName) {
        }).map(function(cacheName) {
          return caches.delete(cacheName);
        })
      );
    })
  );
});

self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.match(event.request)
      .then(function(response) {
        if (response) {
          return response;
        }
        return fetch(event.request);
      })
  );
});
The issue I am facing is that in Chrome DevTools the cache storage graph keeps growing,
and when I look at the cache storage in the file explorer, old folders are not deleted; every time I refresh the page, a new folder is created.
These encrypted folders keep increasing every time I change the CACHE_NAME (the version of the cache).
Please help. I have tried a lot but am unable to solve it.

The callback function you are passing to filter() does not return anything, whereas it needs to return a truthy value for each cacheName you want to delete, so that those names survive the filter and reach the map() call that deletes them.
cacheNames.filter(function(cacheName) {
  // Return true for the cache names you want to
  // delete, by specifying the conditions here
}).map(function(cacheName) {
  return caches.delete(cacheName);
})
For example:
var staticCacheName = 'my-site-v18';

self.addEventListener('install', function(event) {
  var urlsToCache = [
    '1.jpg',
    '2.jpg'
  ];
  event.waitUntil(
    caches.open(staticCacheName).then(function(cache) {
      return cache.addAll(urlsToCache);
    })
  );
});

self.addEventListener('activate', function(event) {
  console.log('activating');
  event.waitUntil(
    caches.keys().then(function(cacheNames) {
      console.log(cacheNames);
      return Promise.all(
        cacheNames.filter(function(cacheName) {
          // Keep this site's caches, except the current version
          return cacheName.startsWith('my-site') && cacheName != staticCacheName;
        }).map(function(cacheName) {
          return caches.delete(cacheName);
        })
      );
    })
  );
});

You might need to change your activate handler and try another approach.
I use it like this:
var cacheName = 'my-site-cache-v18'; // the current cache version

self.addEventListener('activate', function(e) {
  console.log('[ServiceWorker] Activate');
  e.waitUntil(
    caches.keys().then(function(keyList) {
      return Promise.all(keyList.map(function(key) {
        // Delete every cache that doesn't match the current version
        if (key !== cacheName) {
          console.log('[ServiceWorker] Removing old cache', key);
          return caches.delete(key);
        }
      }));
    })
  );
  return self.clients.claim();
});
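A side note (my addition, not part of the original answer): clients.claim() only takes effect once the worker activates, so if you also want a freshly installed worker to activate immediately instead of waiting for old tabs to close, the usual companion is self.skipWaiting() in the install handler:
self.addEventListener('install', function(e) {
  // Activate the new service worker as soon as installation finishes,
  // instead of waiting for all pages controlled by the old one to close.
  self.skipWaiting();
});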

Does the cache usage grow every time you reload the page?
This may be due to a bug in Chrome. It has been fixed and looks like it will go out in v65:
https://bugs.chromium.org/p/chromium/issues/detail?id=801024
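If the goal is simply to wipe every cache while developing on localhost, note that the Cache Storage API is also exposed to regular pages (window.caches, available on secure origins, which includes localhost), so you can run a one-liner in the DevTools console. A minimal sketch:
// Run in the page's DevTools console: deletes every cache for this origin
caches.keys().then(function(keys) {
  return Promise.all(keys.map(function(key) {
    return caches.delete(key);
  }));
}).then(function() {
  console.log('all caches deleted');
});
Chrome DevTools' Application tab ("Clear storage") does the same thing interactively.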

Related

Service worker run event waitUntil() based on some condition

I am trying to implement a cache-then-network strategy in the service worker which updates the cache in the background. I want to avoid unnecessary fetch requests, so I came up with the following solution:
function cache_then_network(event) {
  var updated = false;
  event.respondWith(
    caches.open(staticCacheName)
      .then(cache => cache.match(event.request)
        .then((response) => {
          if (response) {
            return response;
          } else {
            return fetch(event.request)
              .then((response) => {
                const resClone = response.clone();
                return caches.open(staticCacheName)
                  .then((cache) => {
                    cache.put(event.request, response);
                    updated = true;
                    return resClone;
                  });
              });
          }
        })
      )
  );
  if (!updated) {
    event.waitUntil(update(event.request));
  }
}
The update function updates the cache by fetching the request over the network. The issue is that the updated variable is always false, causing the update function to run every time.
I'm not well versed with service workers, and the code is basically stitched up from multiple sources. So alternative/better solutions are welcome. My ultimate goal is to cache first, fetch from network in background, and set a flag which tells whether the content has changed or not.
The service worker offline cookbook has all the answers:
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.open('mysite-dynamic').then(function(cache) {
      return cache.match(event.request).then(function(response) {
        var fetchPromise = fetch(event.request).then(function(networkResponse) {
          cache.put(event.request, networkResponse.clone());
          return networkResponse;
        });
        return response || fetchPromise;
      });
    })
  );
});
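To answer the actual question about the flag: respondWith() just receives a promise and returns immediately, so the if (!updated) check runs synchronously, long before any .then() callback has had a chance to set updated = true. That is why update() runs every time. If the goal is to keep the background refresh alive after the cached response has been returned, a common adjustment (a sketch, not verbatim from the cookbook snippet above) is to pass the network promise to event.waitUntil() inside the same chain:
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.open('mysite-dynamic').then(function(cache) {
      return cache.match(event.request).then(function(response) {
        var fetchPromise = fetch(event.request).then(function(networkResponse) {
          cache.put(event.request, networkResponse.clone());
          return networkResponse;
        });
        // Keep the worker alive until the background update completes,
        // even after the cached response has already been returned.
        event.waitUntil(fetchPromise);
        return response || fetchPromise;
      });
    })
  );
});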

ERR_FAILED when service worker loads new page, why?

I've written a service worker with help from a tutorial:
var CACHE = 'cache-and-update';

self.addEventListener('install', function (evt) {
  console.log('The service worker is being installed.');
  evt.waitUntil(precache());
});

self.addEventListener('fetch', function (evt) {
  evt.respondWith(fromCache(evt.request));
  evt.waitUntil(update(evt.request));
});

function precache() {
  return caches.open(CACHE).then(function (cache) {
    return cache.addAll([
      // Nothing.
    ]);
  });
}

function fromCache(request) {
  return caches.open(CACHE).then(function (cache) {
    return cache.match(request).then(function (matching) {
      return matching || Promise.reject('no-match');
    });
  });
}

function update(request) {
  return caches.open(CACHE).then(function (cache) {
    return fetch(request).then(function (response) {
      return cache.put(request, response);
    });
  });
}
It always serves from the cache first, then fetches all files, and updates on page reload.
The service worker is registered like this in every HTML file on my server:
<script>
  navigator.serviceWorker.register('https://www.example.com/sw.js', {
    scope: '../'
  });
</script>
Now the problem is, when I go to a page that isn't cached yet, it first shows me the default Chrome ERR_FAILED error (and the 'no-match' promise rejection).
The service worker then fetches it anyway, while showing the error page to the client, and on a reload it works again (because it's now served from the cache).
Why is this happening and how can I make the service worker load the page from the server when there's no cached version available?
You got the fetch listener wrong here.
You are rejecting the promise when the file is not found in the cache; rejecting the promise passed to respondWith() makes the browser treat the request as a network failure, which is why Chrome shows ERR_FAILED. Instead of returning Promise.reject('no-match'), you should fetch the request and then cache it, and you do not need the evt.waitUntil in this case.
Here is a full snippet of a working service worker. If a request doesn't match anything in the cache, we get it from the network, send it to the page, and add it to the cache at the same time.
let cacheName = 'cache-v1';

self.addEventListener('install', (e) => {
  let cache = caches.open(cacheName).then((c) => {
    c.addAll([
      // nothing
    ]);
  });
  e.waitUntil(cache);
});

self.addEventListener('fetch', function (event) {
  event.respondWith(
    caches.open(cacheName).then(function (cache) {
      return cache.match(event.request).then(function (response) {
        return response || fetch(event.request).then(function (response) {
          cache.put(event.request, response.clone());
          return response;
        });
      });
    })
  );
});

In a Chrome extension, how to ensure previous promise resolves before the next one using chrome-promise?

I've been using the chrome-promise library to wrap the Chrome extension API with a facade that returns promises instead of using callbacks. This has generally worked quite well, but I seem to be running into an issue with chrome.storage.local APIs.
My extension's event page listens for the chrome.tabs.onActivated and chrome.tabs.onRemoved events. When it gets the onActivated event, it adds the tab info to an array and calls chrome.storage.local.set(data) to store the updated array in local storage.
When it gets the onRemoved event, it calls chromepromise.storage.local.get(null).then(...) to get the list of tabs via a promise, removes the tab info from the array, and then calls chrome.storage.local.set() again to save the updated array.
The issue is that the onActivated event seems to trigger before the promise flow from the onRemoved event resolves. So the onActivated handler retrieves the old stored array, with the closed tab still in it, and then pushes the newly activated tab. So the stored tab data now includes a tab that's already been closed.
I'm assuming this is an issue with using promises instead of callbacks, but I'm wondering if anyone else has run into this problem with this library and worked around it.
Update
As wOxxOm points out, this is a generic problem with "arbitrating unpredictable asynchronous access to a single resource such as chrome.storage" and not unique to the chrome-promise library.
After researching a bit, I came up with a couple of solutions, added as answers below. One uses a mutex to ensure (I think) that one promise chain's getting and setting of data in chrome.storage completes before the next one starts. The other queues the whole promise chain that's created from an event and doesn't start the next one until the current one has fully completed. I'm not sure which is better, though I suppose locking for a shorter period of time is better.
Any suggestions or better answers are welcome.
Queue
This solution uses a very simple queuing mechanism. The event handlers call queue() with a function that kicks off the promise chain to handle that event. If there isn't already a promise in the queue, then the function is called immediately. Otherwise, it's pushed on the queue and will be triggered when the current promise chain finishes. This means only one event can be processed at a time, which might not be as efficient.
var taskQueue = [];

function queue(fn) {
  taskQueue.push(fn);
  processQueue();
}

function processQueue() {
  const nextTask = taskQueue[0];
  if (nextTask && !(nextTask instanceof Promise)) {
    taskQueue[0] = nextTask()
      .then((result) => {
        console.log("RESULT", result);
        taskQueue.shift();
        processQueue();
      });
  }
}

function onActivated(tabID) {
  console.log("EVENT onActivated", tabID);
  queue(() => Promise.resolve(tabID).then(tab => addTab(tab)));
}

function onRemoved(tabID) {
  console.log("EVENT onRemoved", tabID);
  queue(() => removeTab(tabID));
}

var localData = {
  tabs: []
};

function delay(time) {
  return new Promise(resolve => setTimeout(resolve, time));
}

function getData() {
  return delay(0).then(() => JSON.parse(JSON.stringify(localData)));
}

function saveData(data, source) {
  return delay(0)
    .then(() => {
      localData = data;
      console.log("save from:", source, "localData:", localData);
      return Promise.resolve(localData);
    });
}

function addTab(tabID) {
  return getData().then((data) => {
    console.log("addTab", tabID, "data:", data);
    data.tabs = data.tabs.filter(tab => tab != tabID);
    data.tabs.push(tabID);
    return saveData(data, "addTab");
  });
}

function removeTab(tabID) {
  return getData().then((data) => {
    console.log("removeTab", tabID, "data:", data);
    data.tabs = data.tabs.filter(tab => tab != tabID);
    return saveData(data, "removeTab");
  });
}

const events = [
  () => onActivated(1),
  () => onActivated(2),
  () => onActivated(3),
  () => onActivated(4),
  () => onActivated(2),
  () => { onRemoved(2); onActivated(3); }
];

function playNextEvent() {
  var event = events.shift();
  if (event) {
    delay(0).then(() => { event(); delay(0).then(playNextEvent); });
  }
}

playNextEvent();
Mutex
Update: I ended up using the approach below to create a module that uses a mutex to ensure gets and sets of the Chrome extension storage maintain their order. It seems to be working well so far.
This solution uses the mutex implementation from this article. addTab() and removeTab() call storageMutex.synchronize() with a function that does all the storage getting and setting. This should prevent later events from affecting the storage of earlier events.
The code below is a very simplified version of the extension, but it does run. The playNextEvent() calls at the bottom simulate opening 4 tabs, switching back to tab 2 and closing it, which then causes tab 3 to activate. setTimeout()s are used so that everything doesn't run as one long call stack.
function Mutex() {
  this._busy = false;
  this._queue = [];
}

Object.assign(Mutex.prototype, {
  synchronize: function(task) {
    var self = this;
    return new Promise(function(resolve, reject) {
      self._queue.push([task, resolve, reject]);
      if (!self._busy) {
        self._dequeue();
      }
    });
  },

  _dequeue: function() {
    var next = this._queue.shift();
    if (next) {
      this._busy = true;
      this._execute(next);
    } else {
      this._busy = false;
    }
  },

  _execute: function(record) {
    var task = record[0],
        resolve = record[1],
        reject = record[2],
        self = this;
    task().then(resolve, reject).then(function() {
      self._dequeue();
    });
  }
});

const storageMutex = new Mutex();

function onActivated(tabID) {
  console.log("EVENT onActivated", tabID);
  return Promise.resolve(tabID).then(tab => addTab(tab));
}

function onRemoved(tabID) {
  console.log("EVENT onRemoved", tabID);
  return removeTab(tabID);
}

var localData = {
  tabs: []
};

function delay(time) {
  return new Promise(resolve => setTimeout(resolve, time));
}

function getData() {
  return delay(0).then(() => JSON.parse(JSON.stringify(localData)));
}

function saveData(data, source) {
  return delay(0)
    .then(() => {
      localData = data;
      console.log("save from:", source, "localData:", localData);
      return Promise.resolve(localData);
    });
}

function addTab(tabID) {
  return storageMutex.synchronize(() => getData().then((data) => {
    console.log("addTab", tabID, "data:", data);
    data.tabs = data.tabs.filter(tab => tab != tabID);
    data.tabs.push(tabID);
    return saveData(data, "addTab");
  }));
}

function removeTab(tabID) {
  return storageMutex.synchronize(() => getData().then((data) => {
    console.log("removeTab", tabID, "data:", data);
    data.tabs = data.tabs.filter(tab => tab != tabID);
    return saveData(data, "removeTab");
  }));
}

const events = [
  () => onActivated(1),
  () => onActivated(2),
  () => onActivated(3),
  () => onActivated(4),
  () => onActivated(2),
  () => { onRemoved(2); onActivated(3); }
];

function playNextEvent() {
  var event = events.shift();
  if (event) {
    delay(0).then(() => { event(); delay(0).then(playNextEvent); });
  }
}

playNextEvent();

Service Worker get from cache then update cache

I'm using the following logic in my service worker (in my own words):
If cache exists, use it, but also update cache from the network for later
event.respondWith( // on `fetch`
  caches.open(CACHE)
    .then(function(cache) {
      return cache.match(request);
    })
    .then(function(matching) {
      if (matching) {
        requestAndUpdateCache(event);
        return matching;
      }
      ...
In addition to responding with the cached response, I also run this function, requestAndUpdateCache:
function requestAndUpdateCache(event) {
  // Cache-busting query parameter to force a fresh response from the network
  var url = event.request.url + '?t=' + new Date().getTime();
  fetch(url)
    .then(function(response) {
      if (response && response.status === 200) {
        caches.open(CACHE)
          .then(function(cache) {
            cache.put(event.request, response.clone());
          });
      }
    }, function(error) {
      console.log(error);
    });
}
Question: does this function and its placement make sense to accomplish the logic outlined above?
What you're describing is a stale-while-revalidate strategy.
The canonical place to look for implementations of different service worker caching strategies is Jake Archibald's The Offline Cookbook. There's a section that covers stale-while-revalidate, including the following code:
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.open('mysite-dynamic').then(function(cache) {
      return cache.match(event.request).then(function(response) {
        var fetchPromise = fetch(event.request).then(function(networkResponse) {
          cache.put(event.request, networkResponse.clone());
          return networkResponse;
        });
        return response || fetchPromise;
      });
    })
  );
});
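One caveat about the placement question (my note, not part of the original answer): in the question's version, requestAndUpdateCache(event) is fired off without being tied to the event's lifetime, so the browser may terminate the service worker before cache.put() completes. Having the function return its fetch chain and wrapping the call in event.waitUntil() keeps the worker alive until the update finishes:
// Inside the fetch handler, after matching the cache
// (assumes requestAndUpdateCache returns its fetch(...) promise chain):
event.waitUntil(requestAndUpdateCache(event));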

Should I use promise for mkdir

I use the Bluebird magic:
var fs = Promise.promisifyAll(require('fs'));
and use
fs.readdirAsync(dest).then(function (val) {
  return val;
}).then(function (file) {
...
My question is: for the following code (which is working), should I use the same approach, and how?
I am talking about the mkdir function.
function createDir(folder) {
  return function (req, res, next) {
    if (typeof require.cache.per === 'undefined') {
      require.cache.per = {};
      require.cache.per.mk = false;
    }
    if (!require.cache.per.mk) {
      fs.mkdir(folder, function (e) {
        if (!!e && e.code !== 'EEXIST') {
          console.log('Error to create folder: ' + e);
        }
        require.cache.per.mk = true;
        next();
      });
    } else {
      next();
    }
  };
}
My question is: should I use a promise here or not? What is recommended?
The code is working as expected...
A promise simplifies and unifies the interface. Either .promisify() or .promisifyAll() will do the trick.
Then you can chain everything like this:
fs.mkdirAsync(dir) // promisifyAll() adds the promise-returning *Async variants
  .then(function success() {
    ...
  })
  .catch(function failure(err) {
    ...
  })
  .finally(function () {
  });
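For completeness, a minimal sketch of the .promisify() variant mentioned above, which wraps a single function rather than the whole module:
var Promise = require('bluebird');
var fs = require('fs');

// Wrap just fs.mkdir instead of promisifying the entire fs module
var mkdirAsync = Promise.promisify(fs.mkdir);

mkdirAsync('some-folder')
  .then(function () {
    console.log('folder created');
  })
  .catch(function (err) {
    console.log('could not create folder: ' + err);
  });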
However, in Node.js the most important thing is to NOT block the I/O. It doesn't matter whether you use a promise or a regular async callback, as long as it doesn't block the main thread.
It's OK to have synchronous code in a script that you want to run in a shell, but in a regular application you should never use blocking I/O operations on purpose.
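To illustrate the distinction (a sketch of my own, not from the original answer): the Sync variants of the fs API block the event loop until the disk operation completes, while the callback and promise variants leave it free to serve other requests:
var fs = require('fs');

// Blocking: nothing else runs until mkdir completes.
// Fine in a one-off shell script, bad in a server.
fs.mkdirSync('blocking-folder');

// Non-blocking: the event loop stays free; the callback fires later.
fs.mkdir('non-blocking-folder', function (e) {
  if (e && e.code !== 'EEXIST') {
    console.log('could not create folder: ' + e);
  }
});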
I would definitely update your code to be consistent. If possible, call mkdirAsync instead of mkdir.
Example (from OP's code):
var fs = Promise.promisifyAll(require('fs'));
// ...
fs.mkdirAsync(folder)
  .catch({ code: 'EEXIST' }, function(e) {
    // don't care about this error code
  })
  .catch(function(e) {
    console.log('Error to create folder: ' + e);
  })
  .then(function() {
    require.cache.per.mk = true;
    next();
  });
Promise.promisifyAll(fs);

return fs.mkdirAsync(dir1)
  .then(function() {
    return fs.mkdirAsync(dir2);
  })
  .then(function() {
    return fs.mkdirAsync(dir3);
  });
Hope this helps.
