Using service workers with Apache aliases not working - javascript

I want to add a service worker to cache loaded resources in my application.
Let's imagine my application is loading under https://domain/application.
From that /application page I'm fetching resources from a server alias,
https://domain/mat-resources/applications/application1/dist/..
Here is my registration code:
if (navigator.serviceWorker) {
navigator.serviceWorker.register('/mat-resources/applications/application1/service-worker.js', {scope: '/mat-resources/applications/application1/'}).then(function (registration) {
console.log("registration", registration);
}).catch(function (e) {
console.log(e);
});
} else {
console.log('Service Worker is not supported in this browser.')
}
This is the code I have added to service-worker.js:
'use strict';
const VERSION = 'v1';
const PRECACHE = `precache-${VERSION}`;
const RUNTIME = `runtime-${VERSION}`;
const enableRuntimeCache = true;
const mode = 'cache-update';
const PRECACHE_URLS = [
'https://fonts.googleapis.com/css?family=Open+Sans:400,600|Roboto:400,500',
'./dist/js/vendor.bundle.js',
'./dist/js/app.bundle.js',
'./dist/css/styles.min.css'
];
let NetworkOnline = true;
self.addEventListener('install', event => {
event.waitUntil(
caches.open(PRECACHE).then(cache => {
cache.addAll(PRECACHE_URLS);
}).then(self.skipWaiting())
);
});
self.addEventListener('activate', event => {
const currentCaches = [PRECACHE, RUNTIME];
event.waitUntil(
caches.keys().then(cacheNames => {
return cacheNames.filter(cacheName => !currentCaches.includes(cacheName));
}).then(cachesToDelete => {
return Promise.all(cachesToDelete.map(cacheToDelete => {
return caches.delete(cacheToDelete);
}));
}).then(() => {
self.clients.claim();
})
);
});
self.addEventListener('fetch', event => {
if (event.request.url.startsWith(self.location.origin)) {
event.respondWith(fromCache(event.request));
if (isOnline()) {
if (mode === 'cache-update') {
event.waitUntil(
update(event.request)
/*.then(refresh)*/
.catch(errorHandler)
);
}
}
}
});
/*function setFromCache(request) {
console.log(self);
updateFromCache(true);
}*/
function fromCache(request) {
return caches.match(request).then(cachedResponse => {
if (cachedResponse) {
return cachedResponse;
}
if (isOnline()) {
if (enableRuntimeCache) {
return caches.open(RUNTIME).then(cache => {
return fetch(request).then(response => {
return cache.put(request, response.clone()).then(() => {
return response;
});
}).catch(errorHandler);
});
} else {
return fetch(request).then(response => {
return response;
});
}
}
});
}
function update(request) {
let asset = request.url.substr(request.url.lastIndexOf('/') + 1);
let openCache = (PRECACHE_URLS.some(val => val.indexOf(asset) >= 0)) ? PRECACHE : RUNTIME;
return caches.open(openCache).then(cache => {
return fetch(`${request.url }?${new Date().valueOf()}`).then(response => {
return cache.put(request, response.clone()).then(() => {
return response;
});
}).catch(errorHandler);
});
}
function refresh(response) {
return self.clients.matchAll().then(clients => {
clients.forEach(client => {
var message = {
type: 'refresh',
url: response.url,
eTag: response.headers.get('ETag')
};
client.postMessage(JSON.stringify(message));
});
});
}
function isOnline() {
return self.navigator.onLine;
}
function errorHandler(error) {
if (error instanceof TypeError) {
if (error.message.includes('Failed to fetch')) {
console.error('(FtF) Error caught:', error);
} else {
console.error('Error caught:', error);
}
}
}
After I refresh, I can see the service worker is registered successfully, but the resources are not cached.
Please help.

You have defined a scope when you register the service worker. This limits the service worker to handling fetches only for resources that fall under the /mat-resources/applications/application1/**/* path:
{scope: '/mat-resources/applications/application1/'}
If you want the service worker to handle resources at the root of the application (/application), you need to set the scope to /:
{scope: '/'}
You can read more about scopes on Web Fundamentals.
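Note also that, by default, a service worker's maximum scope is the directory its script lives in. Widening the scope to / for a script served from /mat-resources/applications/application1/ additionally requires the server to send the Service-Worker-Allowed header on the script response (with Apache's mod_headers, something like Header set Service-Worker-Allowed "/" for that location). A minimal registration sketch under that assumption:
if (navigator.serviceWorker) {
    // Assumes the service-worker.js response carries: Service-Worker-Allowed: /
    navigator.serviceWorker.register(
        '/mat-resources/applications/application1/service-worker.js',
        { scope: '/' }
    ).then(function (registration) {
        console.log('registration', registration);
    }).catch(function (e) {
        console.log(e);
    });
}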

Related

Workbox cache processing does not work with PrecacheController in v6

I was using Workbox 5.14 (@nuxtjs/pwa) to precache files on the 'message' event, passing the URLs to PrecacheController when the message (sent from workbox-window) was received.
With v5, PrecacheController.addToCacheList() would not execute the caching by itself; for some reason it was PrecacheController.install() that did.
However, with v6, PrecacheController.install() now requires an 'install' or 'activate' event, and I cannot execute PrecacheController.install(event) from a 'message' event. This results in an error:
"sw.js:238 Uncaught (in promise) DOMException: Failed to execute 'waitUntil' on 'ExtendableEvent': The event handler is already finished and no extend lifetime promises are outstanding."
How can I execute PrecacheController's caching process on the 'message' event?
Library Affected:
workbox-precaching
Browser & Platform:
Google Chrome 106.0.5249.103 (Official Build)
Issue or Feature Request Description:
v5 (working):
/* global importScripts, workbox, consola, processFuncPromise */
const cacheName = workbox.core.cacheNames.precache
const precacheController = new workbox.precaching.PrecacheController(cacheName)
addEventListener('message', async (event) => {
if (event.data.type === 'ADD_PRECACHE') {
const cacheTargetFiles = event.data.payload
const addCaches = async (cacheTargetFiles) => {
for (const file of cacheTargetFiles) {
const cacheKey = precacheController.getCacheKeyForURL(file.filePath)
const cached = await caches
.match(cacheKey)
.then((response) => response !== undefined)
if (!cached) {
precacheController.addToCacheList([{ url: file.filePath, revision: file.revision }])
}
}
}
const checkCaches = async () => {
const cachedList = cacheTargetFiles.map(async (file) => {
const cacheKey = precacheController.getCacheKeyForURL(file.filePath)
const cached = await caches
.match(cacheKey)
.then((response) => response !== undefined)
return cached
})
const cachedListResult = await Promise.all(cachedList)
const cachedListResultFilterd = cachedListResult.filter((response) => {
return response
})
if (cachedListResultFilterd.length === cacheTargetFiles.length) {
return Promise.resolve({ isCompleted: true })
} else {
return Promise.resolve(null)
}
}
await addCaches(cacheTargetFiles)
// ★Caching is started with this INSTALL
precacheController.install()
await processFuncPromise(checkCaches)
self.clients.matchAll().then((clients) =>
clients.forEach((client) => {
client.postMessage({ type: 'FINISHED_ADD_PRECACHE' })
})
)
}
})
addEventListener('install', (event) => {
event.waitUntil(precacheController.install())
event.waitUntil(self.skipWaiting())
})
addEventListener('activate', (event) => {
workbox.precaching.cleanupOutdatedCaches()
event.waitUntil(precacheController.activate())
event.waitUntil(self.clients.claim())
})
addEventListener('fetch', (event) => {
const cacheKey = precacheController.getCacheKeyForURL(event.request.url)
event.respondWith(
caches.match(cacheKey).then(function (response) {
// Cache hit - return the response from the cached version
if (response) {
return response
}
// Not in cache - return the result from the live server
// `fetch` is essentially a "fallback"
return fetch(event.request)
})
)
})
// ↓processFuncPromise()
// export const processFuncPromise = (func, interval = 500) => {
// const retryFunc = (resolve, reject) =>
// func()
// .then((result) => ({ result, isCompleted: result !== null }))
// .then(({ result, isCompleted }) => {
// if (isCompleted) {
// return resolve(result)
// } else {
// return setTimeout(() => retryFunc(resolve, reject), interval)
// }
// })
// .catch(reject)
// return new Promise(retryFunc)
// }
v6 (not working):
/* global importScripts, workbox, consola, processFuncPromise */
const cacheName = workbox.core.cacheNames.precache
const precacheController = new workbox.precaching.PrecacheController(cacheName)
addEventListener('message', async (event) => {
if (event.data.type === 'ADD_PRECACHE') {
const cacheTargetFiles = event.data.payload
const addCaches = async (cacheTargetFiles) => {
for (const file of cacheTargetFiles) {
const cacheKey = precacheController.getCacheKeyForURL(file.filePath)
const cached = await caches
.match(cacheKey)
.then((response) => response !== undefined)
if (!cached) {
precacheController.addToCacheList([{ url: file.filePath, revision: file.revision }])
}
}
}
const checkCaches = async () => {
const cachedList = cacheTargetFiles.map(async (file) => {
const cacheKey = precacheController.getCacheKeyForURL(file.filePath)
const cached = await caches
.match(cacheKey)
.then((response) => response !== undefined)
return cached
})
const cachedListResult = await Promise.all(cachedList)
const cachedListResultFilterd = cachedListResult.filter((response) => {
return response
})
if (cachedListResultFilterd.length === cacheTargetFiles.length) {
return Promise.resolve({ isCompleted: true })
} else {
return Promise.resolve(null)
}
}
await addCaches(cacheTargetFiles)
// ★ERROR
precacheController.install(event)
await processFuncPromise(checkCaches)
self.clients.matchAll().then((clients) =>
clients.forEach((client) => {
client.postMessage({ type: 'FINISHED_ADD_PRECACHE' })
})
)
}
})
addEventListener('install', (event) => {
precacheController.install(event)
event.waitUntil(self.skipWaiting())
})
addEventListener('activate', (event) => {
workbox.precaching.cleanupOutdatedCaches()
precacheController.activate(event)
event.waitUntil(self.clients.claim())
})
addEventListener('fetch', (event) => {
const cacheKey = precacheController.getCacheKeyForURL(event.request.url)
event.respondWith(
caches.match(cacheKey).then(function (response) {
// Cache hit - return the response from the cached version
if (response) {
return response
}
// Not in cache - return the result from the live server
// `fetch` is essentially a "fallback"
return fetch(event.request)
})
)
})
// ↓processFuncPromise()
// export const processFuncPromise = (func, interval = 500) => {
// const retryFunc = (resolve, reject) =>
// func()
// .then((result) => ({ result, isCompleted: result !== null }))
// .then(({ result, isCompleted }) => {
// if (isCompleted) {
// return resolve(result)
// } else {
// return setTimeout(() => retryFunc(resolve, reject), interval)
// }
// })
// .catch(reject)
// return new Promise(retryFunc)
// }
I asked the question on GitHub but could not get an answer, so I came here.
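For what it's worth, the error text suggests the async 'message' handler has already returned by the time install(event) tries to call event.waitUntil internally. One pattern that might help, shown here only as an untested sketch on top of the v6 code above, is to call event.waitUntil synchronously and put the whole async body inside it, so the event is still extendable when precacheController.install(event) runs:
addEventListener('message', (event) => {
  if (event.data.type !== 'ADD_PRECACHE') return
  // Call waitUntil before any await so the event's lifetime is extended
  // while the async work (addCaches/checkCaches from the code above) runs.
  event.waitUntil((async () => {
    const cacheTargetFiles = event.data.payload
    await addCaches(cacheTargetFiles)
    precacheController.install(event) // the event now has an outstanding waitUntil
    await processFuncPromise(checkCaches)
    const clients = await self.clients.matchAll()
    clients.forEach((client) => client.postMessage({ type: 'FINISHED_ADD_PRECACHE' }))
  })())
})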

subscribe is deprecated in Ionic

I have this warning, "subscribe is deprecated: Use an observer instead of a complete callback", in an Ionic project. Please help.
fetch(cb) {
this.loadingIndicator = true;
this.cservice.postNcRangoConta(this.body).subscribe(
res => {
try {
if (res) {
this.headers = Object.keys(res[0]);
this.columns = this.getColumns(this.headers);
this.temp = [...res];
cb(res);
this.loadingIndicator = false;
}
} catch (error) {
this.loadingIndicator = false;
this.rows = null;
this.toast.presentToast('No se encontraron datos', 'warning');
}
},
err => {
console.log(err);
if (this.desde || this.hasta) {
this.loadingIndicator = false;
this.toast.presentToast('La API no responde', 'danger');
} else {
this.loadingIndicator = false;
this.toast.presentToast('Debe llenar las fechas', 'warning');
}
}
);
}
The subscribe method itself isn't actually deprecated, but the way you're using it is: passing separate callback arguments is deprecated in favor of passing an observer object. Try switching to the new syntax:
// Deprecated
source.subscribe(
(res) => cb(res),
error => console.error(error),
() => console.log('Complete')
);
// Recommended
source.subscribe({
next: (res) => cb(res),
error: error => console.error(error),
complete: () => console.log('Complete')
});
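Applied to the fetch(cb) method from the question, the observer form might look like this (a sketch that only wraps the existing handlers in an observer object):
fetch(cb) {
  this.loadingIndicator = true;
  this.cservice.postNcRangoConta(this.body).subscribe({
    next: res => {
      try {
        if (res) {
          this.headers = Object.keys(res[0]);
          this.columns = this.getColumns(this.headers);
          this.temp = [...res];
          cb(res);
          this.loadingIndicator = false;
        }
      } catch (error) {
        this.loadingIndicator = false;
        this.rows = null;
        this.toast.presentToast('No se encontraron datos', 'warning');
      }
    },
    error: err => {
      console.log(err);
      if (this.desde || this.hasta) {
        this.loadingIndicator = false;
        this.toast.presentToast('La API no responde', 'danger');
      } else {
        this.loadingIndicator = false;
        this.toast.presentToast('Debe llenar las fechas', 'warning');
      }
    }
  });
}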

WebRTC: how to set the remote video after the receiver sends back an answer

Hello to all, I am new to this. I have almost everything done, but on the initiator side I don't know how to receive the remote video stream after the answer comes back. Can someone help me please?
This is my client:
$(document).ready(() => {
const configuration = {
iceServers: [{ url: 'stun:stun2.1.google.com:19302' }]
}
var peerConection = null;
// var btnCall = $('body #call');
var list = $('#mylist');
var TitlePrint = $('#titleUser');
var localVideo = document.getElementById('local');
var remoteVideo = document.getElementById('remote');
var userid = null;
var socket = io();
socket.on('connect', () => {
userid = socket.id
TitlePrint.text(userid);
});
socket.on('users', data => {
var users = [];
list.empty();
for (let index = 0; index < data.user.length; index++) {
if (data.user[index] != userid) {
users.push(`<button id="call" class="list-group-item list-group-item-action" data-ids="${data.user[index]}">${data.user[index]}</button>`);
}
}
if (users.length != 0) {
list.html(users);
} else {
list.html(`<div class="list-group-item"> Any users connected! </div>`);
}
});
$('body').on('click', '#call', function () {
let toId = $(this).attr('data-ids');
socket.emit('initiator', { initiatorid: userid, receiverid: toId });
});
socket.on('initiator', data => {
peerConection = createRTC(socket);
if (data.initiatorid === userid) {
console.log('this is the initiator');
initiateSignaling(socket, peerConection, data.receiverid, data.initiatorid);
} else {
console.log('this is the receiver');
prepareToReceiveOffer(socket, peerConection, data.initiatorid, data.receiverid);
}
});
// =============== HELPERS =====================//
function createRTC(socket) {
console.log('createRTC')
var peerConection = new RTCPeerConnection(configuration);
peerConection.onicecandidate = (e) => {
if (e.candidate) {
console.log('emit candidate')
socket.emit('send-candidate', e.candidate);
}
}
socket.on('receiver-candidate', (candidate) => {
peerConection.addIceCandidate(candidate);
});
return peerConection;
}
function initiateSignaling(socket, peerConection, targetID, from) {
navigator.mediaDevices.getUserMedia({ video: true, audio: false }).then((stream) => {
stream.getTracks().forEach(function (track) {
peerConection.addTrack(track, stream);
});
localVideo.srcObject = stream;
peerConection.createOffer().then(function (offer) {
return peerConection.setLocalDescription(offer);
})
.then(function () {
socket.emit('send-offer', {
from: from,
target: targetID,
type: "send-offer",
sdp: peerConection.localDescription
});
})
.catch(function (reason) {
console.log('error on create offer', reason);
});
})
socket.on('receiver-answer', (answer) => {
console.log(answer);
peerConection.setRemoteDescription(answer.sdp);
peerConection.ontrack = function (event) {
remoteVideo.srcObject = event.streams[0];
};
});
}
function prepareToReceiveOffer(socket, peerConection, targetID, from) {
socket.on('receiver-offer', (offer) => {
console.log(offer);
peerConection.setRemoteDescription(offer.sdp);
peerConection.createAnswer().then(function (answer) {
return peerConection.setLocalDescription(answer);
})
.then(function () {
socket.emit('send-answer', {
from: from,
target: targetID,
type: "send-answer",
sdp: peerConection.localDescription
});
});
peerConection.ontrack = function (event) {
remoteVideo.srcObject = event.streams[0];
};
navigator.mediaDevices.getUserMedia({ video: true, audio: false }).then((stream) => {
localVideo.srcObject = stream;
})
});
}
});
I am just using socket.io; I handle the offer and answer, and on my socket server I have set it up like this:
socket.on('initiator', (init) => {
console.log(init);
io.to('video').emit('initiator', init);
});
socket.on('send-offer', offer => {
console.log('sending offer', offer);
socket.broadcast.emit('receiver-offer', offer);
});
socket.on('send-answer', answer => {
console.log('sending answer', answer);
socket.broadcast.emit('receiver-answer', answer);
});
socket.on('send-candidate', candidate => {
console.log(candidate);
socket.broadcast.emit('receiver-candidate',candidate);
});
I get the remote video on my receiver from the initiator, but not on the initiator. I don't know what I'm missing to get the remote video. Thanks so much, guys.
The initiator calls addTrack(), but the receiver doesn't, so media is only being sent one way.
In prepareToReceiveOffer you call getUserMedia() but never add the resulting tracks to the peer connection. If you want a two-way call, the receiver also needs to call addTrack() as part of the offer/answer negotiation.
Just be sure to call getUserMedia() after setRemoteDescription so you don't miss ICE candidates:
function prepareToReceiveOffer(socket, peerConection, targetID, from) {
socket.on('receiver-offer', (offer) => {
console.log(offer);
peerConection.setRemoteDescription(offer.sdp)
.then(() => navigator.mediaDevices.getUserMedia({video: true, audio: false}))
.then(stream => {
localVideo.srcObject = stream;
for (const track of stream.getTracks()) {
peerConection.addTrack(track, stream);
}
return peerConection.createAnswer();
})
.then(function (answer) {
return peerConection.setLocalDescription(answer);
})
.then(function () {
socket.emit('send-answer', {
from: from,
target: targetID,
type: "send-answer",
sdp: peerConection.localDescription
});
})
.catch(err => console.log(err.message));
peerConection.ontrack = function (event) {
remoteVideo.srcObject = event.streams[0];
};
});
}
I got it resolved. In this particular case, in my initiateSignaling function, when I receive the answer back I had to include this:
socket.on('receiver-answer', (answer) => {
console.log(answer);
peerConection.setRemoteDescription(answer.sdp)
.then(function () {
return navigator.mediaDevices.getUserMedia({video:true, audio: false});
})
.then(function (stream) {
return peerConection.addStream(stream);
})
peerConection.ontrack = function (event) {
remoteVideo.srcObject = event.streams[0];
};
});
Now it's working for me.
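One small follow-up, not from the original poster: addStream() is deprecated in current browsers, so the equivalent with the modern API would be to add each track from the stream instead, for example:
socket.on('receiver-answer', (answer) => {
    peerConection.setRemoteDescription(answer.sdp)
        .then(function () {
            return navigator.mediaDevices.getUserMedia({ video: true, audio: false });
        })
        .then(function (stream) {
            localVideo.srcObject = stream;
            // addTrack() per track replaces the deprecated addStream()
            stream.getTracks().forEach(function (track) {
                peerConection.addTrack(track, stream);
            });
        })
        .catch(function (err) { console.log(err.message); });
    peerConection.ontrack = function (event) {
        remoteVideo.srcObject = event.streams[0];
    };
});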

Testing Chained Promises (Jasmine, React, Karma)

I have run into several situations on my present project where I have a chain of promises that I'm not sure how to deal with.
Here is the relevant code block:
return this.axios.get(path, requestOpts)
.then((response) => {console.log('did authorize: ', response); return response})
.then((response) => {
if (response.data.ok) {
window.localStorage.setItem(path, JSON.stringify(response.data));
console.log("Setting localStorage item ", path, response.data);
return response.data.payloadUrl;
} else {
console.error("Non-ok response for ", path, response.data);
const resp: DisplayTokenResponse = response.data;
//TODO: reject promise?
if (resp.status === "AUTHENTICATION_REQUIRED") {
this.axiosService.goToLoginPage(window.location + '');
}
Promise.reject(response.data.message);
}
});
My test (so far) looks like this:
describe('.authorize()', () => {
let axiosSpy: jasmine.Spy;
beforeEach((done) => {
spyOn(svc, 'keyPath').and.returnValue(path);
spyOn(svc, 'storedToken').and.returnValue(stored);
let response = {
data: {
ok: true,
message: 'test-response',
payloadUrl: 'http://payload-url.com'
}
}
spyOn(svc.axios, 'get').and.callFake(
(path:string, reqOpts:AxiosRequestConfig) => {
return new Promise(() => {
response
});
}, (e) => {
console.log(`failed`);
});
});
describe('should authorize user', () => {
it('when supplied a STRING', () => {
clientId = clientId_string;
});
it('when supplied a NUMBER', () => {
clientId = clientId_number;
});
afterEach((done) => {
svc.authorize(clientId, locationId, screenId).then((result) => {
console.log(`result ${result}`);
done();
}, (e) => {
console.log(`failed with error ${e}`);
done();
});
});
});
});
I can test one-level-down promises, but how do I set up my tests to handle situations like this?
Finally got it figured out. I believe it stemmed from confusing how Promise instances are created with how they get resolved: my fake was returning a Promise whose executor never called resolve, so the chain after axios.get() never ran.
The new beforeEach block looks like this:
beforeEach(() => {
spyOn(svc, 'keyPath').and.returnValue(path);
spyOn(svc, 'storedToken').and.returnValue(stored);
let axiosPromise = new Promise((resolve, reject) => {
var responseData = {
data: {
ok: true,
message: 'test-response',
payloadUrl: 'http://payload-url.com'
}
};
resolve(responseData);
});
spyOn(svc.axios, 'get').and.callFake(
()=>{
return axiosPromise;
}
);
});
My tests now pass.
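A shorter variant of the same idea (a sketch, not from the original answer) is to skip the executor entirely and return an already-resolved promise from the fake:
spyOn(svc.axios, 'get').and.callFake(() => Promise.resolve({
  data: {
    ok: true,
    message: 'test-response',
    payloadUrl: 'http://payload-url.com'
  }
}));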

Adding images and CSS files ALONGSIDE HTML file in service worker for offline caching

I have this service worker:
'use strict';
const CACHE_VERSION = 1;
let CURRENT_CACHES = {
offline: 'offline-v' + CACHE_VERSION
};
const OFFLINE_URL = 'offline.html';
function createCacheBustedRequest(url) {
let request = new Request(url, {cache: 'reload'});
if ('cache' in request) {
return request;
}
let bustedUrl = new URL(url, self.location.href);
bustedUrl.search += (bustedUrl.search ? '&' : '') + 'cachebust=' + Date.now();
return new Request(bustedUrl);
}
self.addEventListener('install', event => {
event.waitUntil(
fetch(createCacheBustedRequest(OFFLINE_URL)).then(function(response) {
return caches.open(CURRENT_CACHES.offline).then(function(cache) {
return cache.put(OFFLINE_URL, response);
});
})
);
});
self.addEventListener('activate', event => {
let expectedCacheNames = Object.keys(CURRENT_CACHES).map(function(key) {
return CURRENT_CACHES[key];
});
event.waitUntil(
caches.keys().then(cacheNames => {
return Promise.all(
cacheNames.map(cacheName => {
if (expectedCacheNames.indexOf(cacheName) === -1) {
console.log('Deleting out of date cache:', cacheName);
return caches.delete(cacheName);
}
})
);
})
);
});
self.addEventListener('fetch', event => {
if (event.request.mode === 'navigate' ||
(event.request.method === 'GET' &&
event.request.headers.get('accept').includes('text/html'))) {
console.log('Handling fetch event for', event.request.url);
event.respondWith(
fetch(createCacheBustedRequest(event.request.url)).catch(error => {
console.log('Fetch failed; returning offline page instead.', error);
return caches.match(OFFLINE_URL);
})
);
}
});
It's the standard offline cache service worker: https://googlechrome.github.io/samples/service-worker/custom-offline-page/
How do I cache images and CSS files? Right now, I create an offline page with inline CSS and convert my images into SVG code. This is not ideal.
Do I need multiple service workers with different IDs to cache images for offline use?
Or can I use one service worker to cache multiple kinds of assets offline?
A cache can store multiple requests, so you can call cache.put() several times. You could write:
var ASSETS = ['/index.html', '/js/index.js', '/style/style.css'];
self.oninstall = function (evt) {
evt.waitUntil(caches.open('offline-cache-name').then(function (cache) {
return Promise.all(ASSETS.map(function (url) {
return fetch(url).then(function (response) {
return cache.put(url, response);
});
}));
}))
};
Or, similarly and shorter, using addAll():
var ASSETS = ['/index.html', '/js/index.js', '/style/style.css'];
self.oninstall = function (evt) {
evt.waitUntil(caches.open('offline-cache-name').then(function (cache) {
return cache.addAll(ASSETS);
}))
};
You can find an example loading the set of assets from an external resource in the Service Worker Cookbook.
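Caching the assets at install time is only half of the job; to actually serve them while offline you also need a fetch handler that checks the cache first. A minimal sketch, assuming the same 'offline-cache-name' cache as above:
self.onfetch = function (evt) {
  evt.respondWith(
    caches.match(evt.request).then(function (cached) {
      // Serve the cached copy when we have one, otherwise fall back to the network.
      return cached || fetch(evt.request);
    })
  );
};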
