Development Mode Service Worker Script is Different after Build - javascript

I have a ReactJS web app with two modes: development and production.
Development mode uses port 3001; production uses port 3000.
The service worker runs normally in development mode, but after I run the build process the output script differs from the development one.
How can I make the service worker script output the same in development mode and in production?
service-worker.js (development mode output script)
const OFFLINE_VERSION = 1;
const CACHE_NAME = 'v-208';
// const OFFLINE_URL = 'offline.html';
const OFFLINE_URL = [
  '/offline.html',
  '/media/error/bg6.jpg',
  '/favicon.ico'
];

self.addEventListener('message', (event) => {
  if (event.data && event.data.type === 'SKIP_WAITING') {
    self.skipWaiting();
  }
});

self.addEventListener('install', (event) => {
  event.waitUntil(async function() {
    const cache = await caches.open(CACHE_NAME);
    await cache.addAll(OFFLINE_URL);
  }());
});

self.addEventListener('activate', (event) => {
  var cacheWhitelist = [CACHE_NAME];
  event.waitUntil(async function() {
    caches.keys().then(function(keyList) {
      return Promise.all(keyList.map(function(key) {
        if (cacheWhitelist.indexOf(key) === -1) {
          return caches.delete(key);
        }
      }));
    })
    // Feature-detect
    if (self.registration.navigationPreload) {
      // Enable navigation preloads!
      await self.registration.navigationPreload.enable();
    }
  }());
});

self.addEventListener('fetch', (event) => {
  const { request } = event;
  // Always bypass for range requests, due to browser bugs
  if (request.headers.has('range')) return;
  event.respondWith(async function() {
    // Try to get from the cache:
    const cachedResponse = await caches.match(request);
    if (cachedResponse) return cachedResponse;
    try {
      // See https://developers.google.com/web/updates/2017/02/navigation-preload#using_the_preloaded_response
      const response = await event.preloadResponse;
      if (response) return response;
      // Otherwise, get from the network
      return await fetch(request);
    } catch (err) {
      // If this was a navigation, show the offline page:
      if (request.mode === 'navigate') {
        return caches.match('offline.html');
      }
      // Otherwise throw
      throw err;
    }
  }());
});
Build process:
npm run build
Then I run it using pm2:
pm2 serve build --watch --name ekinerja-frontend 3000 --spa
service-worker.js (production mode output script)
importScripts(
  "/precache-manifest.4cd8995656b1c33c355715652d9d2264.js"
);

self.addEventListener('message', (event) => {
  if (event.data && event.data.type === 'SKIP_WAITING') {
    self.skipWaiting();
  }
});

workbox.core.clientsClaim();

self.__precacheManifest = [].concat(self.__precacheManifest || []);
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});

workbox.routing.registerNavigationRoute(workbox.precaching.getCacheKeyForURL("/index.html"), {
  blacklist: [/^\/_/,/\/[^/?]+\.[^/]+$/],
});

You need to change the path you register the service worker from. After the build, all of the code is moved to a new folder, so it is recommended to keep your service worker outside src, in the static folder, and register it like this:
if ("serviceWorker" in navigator) {
  navigator.serviceWorker
    .register("/static/js/serviceWorker.js")
    .then(....)
}

Related

Is it possible to implement shared state management for CLI applications without the need for an external database?

I want to create a CLI application. I don't think this question is about a specific technology, but for reproduction purposes I'm using Node with command-line-commands (though I know there are plenty of others, e.g. commander).
Given the following sample code
#!/usr/bin/env node
'use strict';

const commandLineArgs = require('command-line-args');
const commandLineCommands = require('command-line-commands');
const commandLineUsage = require('command-line-usage');

let isRunning = false; // global state

let commandResult;
try {
  commandResult = commandLineCommands([ 'start', 'info', 'help' ]);
} catch (error) {
  console.error('Invalid command.');
  process.exit(1);
}

if (commandResult.command === null || commandResult.command === 'help') {
  const commandInfo = commandLineUsage([
    { header: 'start', content: 'Sets the value to true' },
    { header: 'info', content: 'Gets the current value' },
  ]);
  console.log(commandInfo);
  process.exit(0);
}

let options;
try {
  options = commandLineArgs([], { argv: commandResult.argv });
} catch (error) {
  console.error('Invalid argument.');
  process.exit(1);
}

if (commandResult.command === 'start') {
  isRunning = true;
} else if (commandResult.command === 'info') {
  console.info({ isRunning });
}
The boolean isRunning represents a shared state. Calling the start command sets its value to true, but calling the info command obviously starts a new process and prints a new isRunning variable with its initial falsy value.
What is the preferred way to keep such state? Must the CLI use external storage (e.g. the local filesystem), or are there ways to keep the information in memory until shutdown?
Generating my own file on the system and storing this variable in it feels like overkill to me.
An old cross-platform hack is to open a known TCP port. The first process able to open the port will get the port. All other processes trying to open the port will get an EADDRINUSE error:
const net = require('net');

const s = net.createServer();
s.on('error', () => {
  console.log('Program is already running!');
  // handle what to do here
});
s.listen(5123, '127.0.0.1', () => {
  console.log('OK');
  // run your main function here
});
This works in any language on any OS. There is only one thing you need to be careful of - some other program may be accidentally using the port you are using.
I originally came across this technique on the Tcl wiki: https://wiki.tcl-lang.org/page/singleton+application.
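As a rough sketch of how this could be extended to the shared-state part of the question (my own elaboration, not part of the original hack): the process that won the port can write its state to each incoming connection, and later invocations can connect and read it.
const net = require('net');

// In the process that owns the port: reply to each connection with the state.
// const s = net.createServer((socket) => socket.end(JSON.stringify({ isRunning: true })));
// s.listen(5123, '127.0.0.1');

// In a later invocation (e.g. the "info" command): ask the running instance.
const client = net.connect(5123, '127.0.0.1');
client.on('data', (chunk) => {
  console.log('State from running instance:', JSON.parse(chunk.toString()));
});
client.on('error', () => {
  console.log('No other instance is running');
});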
Another old hack for this is to try to create a symlink.
Creating a symlink is generally guaranteed to be atomic (per the POSIX spec) by most Unix and Unix-like OSes, so there is no issue with potential race conditions using this technique (unlike creating a regular file). I presume it is also atomic on Windows, but I'm not entirely sure:
const fs = require('fs');

const scriptName = process.argv[1];
const lockFile = '/tmp/my-program.lock';

try {
  // fs.symlinkSync takes (target, path): create the lock at lockFile, pointing at this script.
  fs.symlinkSync(scriptName, lockFile);
  // run your main function here
  fs.unlinkSync(lockFile);
}
catch (err) {
  console.log('Program already running');
  // handle what to do here
}
Note: while creating a symlink is atomic, other operations on symlinks are not guaranteed to be atomic. Specifically, be very careful of assuming that updating a symlink is atomic - it is NOT. Updating a symlink involves two operations: deleting the link and then creating the link. A second process may execute its delete operation after your process creates the symlink, causing two processes to think they're the only ones running. In the example above we delete the link after creating it, not before.
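A small variation on the snippet above (my own sketch, same lock path assumed) that distinguishes the "already running" case from other errors and releases the lock even if the main function throws:
const fs = require('fs');

const lockFile = '/tmp/my-program.lock';

try {
  // Atomically create the lock; this throws EEXIST if another instance holds it.
  fs.symlinkSync(process.argv[1], lockFile);
} catch (err) {
  if (err.code === 'EEXIST') {
    console.log('Program already running');
    process.exit(1);
  }
  throw err;
}

try {
  // run your main function here
} finally {
  // Always release the lock, even if the main function throws.
  fs.unlinkSync(lockFile);
}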
One way would be to use a local web server.
index.js
const commandLineArgs = require('command-line-args');
const commandLineCommands = require('command-line-commands');
const commandLineUsage = require('command-line-usage');
var http = require('http');

let globalState = {
  isRunning: false
}

let commandResult;
try {
  commandResult = commandLineCommands([ 'start', 'info', 'help' ]);
} catch (error) {
  console.error('Invalid command.');
  process.exit(1);
}

if (commandResult.command === null || commandResult.command === 'help') {
  const commandInfo = commandLineUsage([
    { header: 'start', content: 'Sets the value to true' },
    { header: 'info', content: 'Gets the current value' },
  ]);
  console.log(commandInfo);
  process.exit(0);
}

let options;
try {
  options = commandLineArgs([], { argv: commandResult.argv });
} catch (error) {
  console.error('Invalid argument.');
  process.exit(1);
}

if (commandResult.command === 'start') {
  globalState.isRunning = true;
  http.createServer(function (req, res) {
    res.write(JSON.stringify(globalState));
    res.end();
  }).listen(9615);
} else if (commandResult.command === 'info') {
  console.info({ globalState });
}
index2.js
var http = require('http');

var req = http.request({ host: "localhost", port: 9615, path: "/" }, (response) => {
  var responseData = "";
  response.on("data", (chunk) => {
    responseData += chunk;
  });
  response.on("end", () => {
    console.log(JSON.parse(responseData));
  });
});
req.end();
req.on("error", (e) => {
  console.error(e);
});
Here index.js is a program that holds the "shared / global state" and creates a web server to communicate with. Other programs, such as index2.js here, can make an HTTP request and ask for the global state. You could also let other programs change the state by having index.js listen for specific requests and act accordingly.
This doesn't have to be done with HTTP like this; you could also use something like node-rpc or node-ipc. I thought the easiest working example would be one with a local HTTP client and server.
Either way, I think the term for what you are looking for is Inter-Process Communication (IPC) or Remote Procedure Call (RPC). I don't see why one couldn't also use WebSockets. Child processes probably won't work here, even if you could implement some kind of parent-child process communication, because only the child processes spawned by the main process could use it.
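For completeness, a minimal sketch of the same idea using only the built-in net module over a local socket instead of HTTP (the socket path is my own assumption; on Windows a named pipe such as \\.\pipe\my-cli would be used instead):
const net = require('net');

const SOCKET_PATH = '/tmp/my-cli.sock';
const globalState = { isRunning: true };

// "start" side: hold the state in memory and answer queries on the socket.
const server = net.createServer((socket) => {
  socket.end(JSON.stringify(globalState));
});
server.listen(SOCKET_PATH);

// "info" side, run from a separate process: connect and print the state.
// const client = net.connect(SOCKET_PATH);
// client.on('data', (chunk) => console.log(JSON.parse(chunk.toString())));
// client.on('error', () => console.log('Nothing is running'));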
EDIT
After reading your question more carefully, I think this is just a matter of keeping the console session open after the start command and setting the isRunning variable.
Check this out:
const commandLineArgs = require('command-line-args');
const commandLineCommands = require('command-line-commands');
const commandLineUsage = require('command-line-usage');
const prompt = require('prompt-sync')();

let globalState = {
  isRunning: false
}

let commandResult;
try {
  commandResult = commandLineCommands([ 'start', 'info', 'help' ]);
} catch (error) {
  console.error('Invalid command.');
  process.exit(1);
}

if (commandResult.command === null || commandResult.command === 'help') {
  const commandInfo = commandLineUsage([
    { header: 'start', content: 'Sets the value to true' },
    { header: 'info', content: 'Gets the current value' },
  ]);
  console.log(commandInfo);
  process.exit(0);
}

let options;
try {
  options = commandLineArgs([], { argv: commandResult.argv });
} catch (error) {
  console.error('Invalid argument.');
  process.exit(1);
}

if (commandResult.command === 'start') {
  globalState.isRunning = true;
  while (globalState.isRunning) {
    let cmd = prompt(">");
    if (cmd === "exit")
      process.exit(0);
    if (cmd === "info")
      console.info({ globalState });
  }
} else if (commandResult.command === 'info') {
  console.info({ globalState });
}
Here I am using the prompt-sync library inside a loop when the program is called with the start command. The console session is kept open indefinitely until the user types exit. I also added an example for the case where the user types info.
Example:
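An illustrative run of the snippet above, assuming it is saved as index.js:
$ node index.js start
> info
{ globalState: { isRunning: true } }
> exit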

Cached file is not being fetched by the service worker

When I try to access http://localhost/visites/ it should fetch the precached file visites/index.php. So I guess I have to indicate somewhere that this particular route matches that file. Do you have any idea how I can do that?
I leave my SW code here just in case:
const cacheName = 'v1';
const cacheAssets = [
  'accueil.php',
  'js/accueil.js',
  'visites/index.php',
  'js/visites.js',
  'js/global.js',
  'css/styles.css',
  'charte/PICTOS/BTN-Visites.png',
  'charte/STRUCTURE/GESTEL-Logo.png',
  'charte/PICTOS/BTN-Animaux.png',
  'charte/PICTOS/BTN-Deconnexion.png',
  'charte/PICTOS/BTN-Fermes.png',
];

// Call Install Event
self.addEventListener('install', e => {
  console.log('Service Worker: Installed');
  e.waitUntil(
    caches
      .open(cacheName)
      .then(cache => {
        console.log('Service Worker: Caching Files');
        cache.addAll(cacheAssets);
      })
      .then(() => self.skipWaiting())
  );
});

// Call Activate Event
self.addEventListener('activate', e => {
  console.log('Service Worker: Activated');
  // Remove unwanted caches
  e.waitUntil(
    caches.keys().then(cacheNames => {
      return Promise.all(
        cacheNames.map(cache => {
          if (cache !== cacheName) {
            console.log('Service Worker: Clearing Old Cache');
            return caches.delete(cache);
          }
        })
      );
    })
  );
});

// Call Fetch Event
self.addEventListener('fetch', e => {
  console.log('Service Worker: Fetching');
  e.respondWith(fetch(e.request).catch(() => caches.match(e.request)));
})
You can include some logic in your fetch handler that accounts for this routing information:
self.addEventListener('fetch', e => {
  // Use a URL object to simplify checking the path.
  const url = new URL(e.request.url);

  // Alternatively, check e.request.mode === 'navigate' if
  // you want to match a navigation to any URL on your site.
  if (url.pathname === '/visites/') {
    e.respondWith(caches.match('visites/index.php'));
    // Return after responding, so that the existing
    // logic doesn't get triggered.
    return;
  }

  e.respondWith(fetch(e.request).catch(() => caches.match(e.request)));
});
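As the comment above hints, if the goal were instead to answer every navigation on the site with that precached page, a sketch of that alternative (my own elaboration, falling back to the network when the cache misses) could look like this:
self.addEventListener('fetch', e => {
  if (e.request.mode === 'navigate') {
    e.respondWith(
      caches.match('visites/index.php').then(cached => cached || fetch(e.request))
    );
    return;
  }
  e.respondWith(fetch(e.request).catch(() => caches.match(e.request)));
});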

Files are not getting cached in ServiceWorker

I have migrated my existing website to a PWA.
My website is very simple, made using HTML, jQuery and JavaScript.
Previously the caching mechanism depended on manifest.appcache.
Now, when I try to cache the files using a service worker, none of the files are getting cached.
I have checked that my service worker is registered and everything seems fine, but the files are not being stored in the cache.
I tried clearing the cache, but that didn't help.
I am sharing my service worker code below; since it is production code I cannot share all the file names.
I used this site to learn PWA.
const staticCacheName = 'site-static-v4';
const dynamicCacheName = 'site-dynamic-v4';
const assets = [
  "index.shtml",
  "fallback.html",
  "manifest.webmanifest"
];

// cache size limit function
const limitCacheSize = (name, size) => {
  caches.open(name).then(cache => {
    cache.keys().then(keys => {
      if (keys.length > size) {
        cache.delete(keys[0]).then(limitCacheSize(name, size));
      }
    });
  });
};

// install event
self.addEventListener('install', evt => {
  //console.log('service worker installed');
  evt.waitUntil(
    caches.open(staticCacheName).then((cache) => {
      console.log('caching shell assets');
      cache.addAll(assets);
    })
  );
});

// activate event
self.addEventListener('activate', evt => {
  //console.log('service worker activated');
  evt.waitUntil(
    caches.keys().then(keys => {
      //console.log(keys);
      return Promise.all(keys
        .filter(key => key !== staticCacheName && key !== dynamicCacheName)
        .map(key => caches.delete(key))
      );
    })
  );
});

// fetch event
self.addEventListener('fetch', evt => {
  //console.log('fetch event', evt);
  evt.respondWith(
    caches.match(evt.request).then(cacheRes => {
      return cacheRes || fetch(evt.request).then(fetchRes => {
        return caches.open(dynamicCacheName).then(cache => {
          cache.put(evt.request.url, fetchRes.clone());
          // check cached items size
          limitCacheSize(dynamicCacheName, 15);
          return fetchRes;
        })
      });
    }).catch(() => {
      if (evt.request.url.indexOf('.html') > -1) {
        return caches.match('fallback.html');
      }
    })
  );
});
Below is the screenshot of my empty cache storage.

Updating service worker to Workbox 5 without bundler

I'm running a pretty straightforward PWA with Workbox 3, primarily for caching and offline purposes. The web page is a forum where users can install the PWA. I'm planning an upgrade, since the current Workbox 3 setup gives some errors when testing, and therefore I had to rebuild the service worker. I thought of giving Workbox 5 a chance.
The code below is what I'm testing with today, and its purpose is (pretty straightforward) to:
Give the user a chance to "install" the new service worker with a button (taken from: https://redfin.engineering/how-to-fix-the-refresh-button-when-using-service-workers-a8e27af6df68).
Cache static assets but not HTML (except offline.html).
Give navigation preload a chance to increase performance.
Create a service worker based on Workbox 5 that is easy to update in the future (push messages).
service-worker.js:
// Load WB locally, skip preload of googleapis in header. (Version 5.1.3)
importScripts('/workbox/workbox-sw.js');
workbox.setConfig({
  modulePathPrefix: '/workbox/'
});

// How are we doing?
if (workbox) {
  console.log('Workbox loaded correctly');
} else {
  console.log('Workbox did not load, check log');
}

/*
// Debug on or off, off in production
workbox.setConfig({
  debug: true
});
*/

// A new SW is waiting, user clicks button that activates the new SW
addEventListener('message', e => {
  if (e.data === 'skipWaiting') {
    skipWaiting();
    clientsClaim();
  }
});

// Cache Offline page
const CACHE_NAME = 'offline-html';
const FALLBACK_HTML_URL = '/offline.html';
addEventListener('install', async (event) => {
  event.waitUntil(
    caches.open(CACHE_NAME)
      .then((cache) => cache.add(FALLBACK_HTML_URL))
  );
});

// Start navigation preload to speed things up a bit.
workbox.navigationPreload.enable();

const networkOnly = new workbox.strategies.NetworkOnly();
const navigationHandler = async (params) => {
  try {
    // Attempt a network request.
    return await networkOnly.handle(params);
  } catch (error) {
    // If it fails, return the cached HTML and log the error
    console.log(error);
    return caches.match(FALLBACK_HTML_URL, {
      cacheName: CACHE_NAME,
    });
  }
};

// Register this strategy to handle all navigations.
const navigationRoute = new workbox.routing.NavigationRoute(navigationHandler);
workbox.routing.registerRoute(navigationRoute);

// Cache static assets
const {StaleWhileRevalidate} = workbox.strategies;
const {CacheFirst} = workbox.strategies;
const {CacheableResponsePlugin} = workbox.cacheableResponse;

workbox.routing.registerRoute(
  ({request}) => request.destination === 'script' || request.destination === 'style' || request.destination === 'font' || request.destination === 'manifest',
  new StaleWhileRevalidate({
    // Use a custom cache name.
    cacheName: 'static-cache2',
  })
);

// Cache image files.
workbox.routing.registerRoute(
  ({request}) => request.destination === 'image',
  // Use the cache if it's available.
  new CacheFirst({
    // Use a custom cache name.
    cacheName: 'image-cache2',
    plugins: [
      new workbox.expiration.ExpirationPlugin({
        // Cache only 100 images.
        maxEntries: 100,
        // Cache for a maximum of two weeks.
        maxAgeSeconds: 14 * 24 * 60 * 60,
        purgeOnQuotaError: true,
      })
    ],
  })
);

// Try to cache opaque from CDN
workbox.routing.registerRoute(
  ({url}) => url.origin === 'https://cdn.mycdn.com' &&
    url.pathname.startsWith('/static/'),
  new CacheFirst({
    cacheName: 'cdn-cache',
    plugins: [
      new CacheableResponsePlugin({
        statuses: [0, 200],
      })
    ]
  })
);
Client JS:
function showRefreshUI(registration) {
  // TODO: Display a toast or refresh UI.
  // This demo creates and injects a button.
  var button = document.createElement('button');
  button.style.position = 'absolute';
  button.style.bottom = '24px';
  button.style.left = '24px';
  button.textContent = 'A new version of the web app is waiting, click here to install';
  button.addEventListener('click', function() {
    if (!registration.waiting) {
      // Just to ensure registration.waiting is available before
      // calling postMessage()
      return;
    }
    button.disabled = true;
    registration.waiting.postMessage('skipWaiting');
  });
  document.body.appendChild(button);
};

function onNewServiceWorker(registration, callback) {
  if (registration.waiting) {
    // SW is waiting to activate. Can occur if multiple clients open and
    // one of the clients is refreshed.
    return callback();
  }

  function listenInstalledStateChange() {
    registration.installing.addEventListener('statechange', function(event) {
      if (event.target.state === 'installed') {
        // A new service worker is available, inform the user
        callback();
      }
    });
  };

  if (registration.installing) {
    return listenInstalledStateChange();
  }

  // We are currently controlled so a new SW may be found...
  // Add a listener in case a new SW is found,
  registration.addEventListener('updatefound', listenInstalledStateChange);
}

window.addEventListener('load', function() {
  var refreshing;

  // When the user asks to refresh the UI, we'll need to reload the window
  navigator.serviceWorker.addEventListener('controllerchange', function(event) {
    if (refreshing) return; // prevent infinite refresh loop when you use "Update on Reload"
    refreshing = true;
    console.log('Controller loaded');
    window.location.reload();
  });

  navigator.serviceWorker.register('/service-worker.js')
    .then(function (registration) {
      // Track updates to the Service Worker.
      if (!navigator.serviceWorker.controller) {
        // The window client isn't currently controlled so it's a new service
        // worker that will activate immediately
        return;
      }
      registration.update();
      onNewServiceWorker(registration, function() {
        showRefreshUI(registration);
      });
    });
});
This code works on my dev server. My questions to SO's Workbox gurus: are there any pitfalls with it, and could someone share suggestions on how to make it better?
Service workers and Workbox are complex, and my main concern is that I've built the service worker in a way that doesn't make the best use of Workbox, and that it maybe uses wrong or bad code, functions, or order of functions.

ServiceWorker failing to Cache persistent assets

I am learning how to implement PWAs and understand the underlying concepts so far. My problem is that when I try to cache static assets in my service worker file, nothing is actually being cached and this error is fired instead.
Uncaught (in promise) TypeError: Request failed
Below is my code.
const staticAssets = [
  './',
  './index.php',
  './account.php',
  './css/account.css',
  './css/bootstrap-4.0.0.css',
  './css/bootstrap.min.css',
  './css/common.css',
  './css/dashboard.css',
  './css/fonts.css',
  './css/home.css',
  './lib/Magnific-Popup/dist/magnific-popup.css', ,
  './lib/sweetalerts/sweetalert.css',
  './images/logo.png',
  './repository/images/*.png',
  './repository/images/*.jpg',
  './repository/images/*.gif',
  './js/account.jsx',
  './js/app.jsx',
  './js/bootstrap-4.0.0.js',
  './js/dashboard.jsx',
  './js/jquery-3.2.1.min.js',
  './js/popper.min.js',
  './js/project.jsx',
  './js/security.jsx',
  './lib/Magnific-Popup/dist/jquery.magnific-popup.min.js',
  './lib/Magnific-Popup/dist/jquery.magnific-popup.js',
  './lib/sweetalerts/sweetalert.min.js',
  './lib/sweetalerts/sweetalert.js'
];

self.addEventListener("install", async event => {
  const cache = await caches.open("mystatic-assets");
  cache.addAll(staticAssets);
});

self.addEventListener("fetch", event => {
  const req = event.request;
  event.respondWith(cacheFirst(req));
});

async function cacheFirst(req) {
  const cacheResponse = await caches.match(req);
  return cacheResponse || fetch(req);
}
What am I doing wrong?
First of all, I have to mention that if there is an error or typo in the paths you provide as static URLs, it will fail and will not cache anything.
First, try this:
self.addEventListener("install", event => {
event.waitUntil(
caches.open("mystatic-assets").then(cache => {
cache.addAll(staticAssets);
})
);
});
Second, in your fetch event:
self.addEventListener("fetch", event => {
const req = event.request;
event.respondWith(cacheFirst(req));
});
function cacheFirst(req) {
return chaches.match(req) || fetch(req)
}
Try this and tell me if it works.
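One detail worth checking in the asset list above (my own observation): cache.addAll() takes literal URLs and does not expand wildcard patterns, so entries such as './repository/images/*.png' will request a file literally named *.png, and a single failed request makes the whole addAll() call reject. Listing concrete files is safer; the file names below are placeholders:
const staticAssets = [
  './',
  './index.php',
  // List each image explicitly instead of using wildcards.
  './repository/images/example-photo.png',
  './repository/images/example-banner.jpg',
  './images/logo.png'
];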
