Only show notifications if tab is closed - javascript

I have a Node.js push notification server (with a WebSocket server also running on it), and it sends a push event whenever the WebSocket server receives a message. The problem is that whenever the WebSocket server gets a message, it broadcasts a notification no matter what. That is fine in general, but it becomes a problem for clients actively on the website: the notification is unnecessary because the person can already see what happened, and the service worker can't access the DOM, so it can't tell whether the window is open or not. I tried the navigator.serviceWorker.controller.postMessage method in the code below to tell the service worker to either "stop" or "start" showing notifications. It all seems correct and I'm not getting any errors, but it doesn't seem to be working: a notification is shown whether the window is open or not.
(main.js)
window.onload = function() {
  if (Notification.permission !== "granted") {
    Notification.requestPermission((permit) => {
      console.log(permit);
    });
  }
  subscribe().catch(error => console.error(error));
  navigator.serviceWorker.controller.postMessage("stop"); // Tell the service worker not to show notifications
}
window.onbeforeunload = function() {
  navigator.serviceWorker.controller.postMessage("start"); // Tell the service worker that it is allowed to show notifications
}
function urlBase64ToUint8Array(base64String) {
  const padding = '='.repeat((4 - base64String.length % 4) % 4);
  const base64 = (base64String + padding).replace(/-/g, '+').replace(/_/g, '/');
  const rawData = window.atob(base64);
  const outputArray = new Uint8Array(rawData.length);
  for (let i = 0; i < rawData.length; ++i) {
    outputArray[i] = rawData.charCodeAt(i);
  }
  return outputArray;
}
const publicVapidKey = <key here>;
async function subscribe() {
  if ('serviceWorker' in navigator) {
    // register the service worker
    const register = await navigator.serviceWorker.register('/sw.js', {
      scope: '/'
    });
    // subscribe to notifications
    const subscription = await register.pushManager.subscribe({
      userVisibleOnly: true,
      applicationServerKey: urlBase64ToUint8Array(publicVapidKey),
    });
    // send the subscription to the server
    await fetch('http://remoteserver.com:8080/subscribe', {
      method: 'POST',
      body: JSON.stringify(subscription),
      headers: {
        'Content-Type': 'application/json'
      }
    });
  } else {
    console.error('Service workers are not supported in this browser');
  }
}
(sw.js)
var allowed;

self.addEventListener('message', event => {
  if (event.data == "stop") {
    allowed = false;
  } else if (event.data == "start") {
    allowed = true;
  }
});

self.addEventListener('push', event => {
  if (allowed == true) {
    const data = event.data.json();
    self.registration.showNotification(data.title, {
      body: data.body,
      icon: data.icon
    });
  }
});
Does anyone know what is going on?

The service worker is an intermediary between your client app and your server.
If the client is determined to be active (it's up to you to make that determination in your code; see the links below), it should communicate that to the service worker.
You can implement a heartbeat/ping-pong between the service worker and your client (not unlike WebSockets) that runs at an interval; if the client fails to respond (or responds with something other than "active"), your service worker will know and can respond appropriately, whether that's simply not showing the notifications received from the server, unsubscribing entirely, etc.
More reading: Page Visibility API, Document.hasFocus(), etc.
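Building on the visibility idea, the push handler itself can also ask the Clients API whether any window of your site is currently visible or focused, and only show a notification when none is. This is a minimal, untested sketch of that approach; it also avoids relying on a global flag, which is lost whenever the service worker is terminated between events:

self.addEventListener('push', event => {
  const data = event.data.json();
  event.waitUntil(
    self.clients.matchAll({ type: 'window', includeUncontrolled: true }).then(windowClients => {
      // is any tab of this origin visible or focused right now?
      const pageIsVisible = windowClients.some(
        client => client.visibilityState === 'visible' || client.focused
      );
      if (!pageIsVisible) {
        return self.registration.showNotification(data.title, {
          body: data.body,
          icon: data.icon
        });
      }
    })
  );
});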

Related

How to stay Signed In for Gmail api without NodeJs?

I want to get users' messages via the Gmail API. For that, Google authorization is needed. I managed to authorize the user with the following code:
let authBtn = document.getElementById('authorize_button');

const CLIENT_ID = 'XXXXXX-XXXXXXXXXXX.apps.googleusercontent.com';
const API_KEY = 'XXXXXX-XXXXXXXXXXXXXXX';
const DISCOVERY_DOC = 'https://www.googleapis.com/discovery/v1/apis/gmail/v1/rest';
const SCOPES = 'https://www.googleapis.com/auth/gmail.readonly';

let tokenClient;
let gapiInited = false;
let gisInited = false;

authBtn.style.visibility = 'hidden';

function gapiLoaded() {
  gapi.load('client', intializeGapiClient);
}

async function intializeGapiClient() {
  await gapi.client.init({
    apiKey: API_KEY,
    discoveryDocs: [DISCOVERY_DOC],
  });
  gapiInited = true;
  maybeEnableButtons();
}

function gisLoaded() {
  tokenClient = google.accounts.oauth2.initTokenClient({
    client_id: CLIENT_ID,
    scope: SCOPES,
    callback: '',
  });
  gisInited = true;
  maybeEnableButtons();
}

function maybeEnableButtons() {
  if (gapiInited && gisInited) {
    authBtn.style.visibility = 'visible';
  }
}

function handleAuthClick() {
  tokenClient.callback = async (resp) => {
    if (resp.error !== undefined) throw (resp);
    authBtn.innerText = 'Refresh';
    await getMessages();
  };
  if (gapi.client.getToken() === null) {
    tokenClient.requestAccessToken({prompt: 'consent'});
  } else {
    tokenClient.requestAccessToken({prompt: ''});
  }
}
In the above code, gapi.client.getToken() === null is always false. Every time I refresh the page I have to reauthorize the user with prompt: 'consent'.
I also want the user to stay signed in until they sign out.
How can I achieve this by modifying the above code?
Can someone please help me?
You are using a system that requires a server-side authentication flow; read about properly handling that here:
https://developers.google.com/identity/gsi/web/guides/verify-google-id-token
The gapi JavaScript is browser code (you obviously know this, since the question includes all sorts of DOM-related code), and therefore authentication is fundamentally not going to be possible entirely in the browser without a server-side flow to handle the callbacks from Google that occur out-of-band from the browser.
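For reference, the server-side verification step described at that link boils down to roughly the following Node.js sketch, using the google-auth-library package; CLIENT_ID is your own OAuth client ID and idToken is whatever credential your page sends to your backend (both names here are placeholders):

const { OAuth2Client } = require('google-auth-library');
const client = new OAuth2Client(CLIENT_ID);

async function verify(idToken) {
  const ticket = await client.verifyIdToken({
    idToken,
    audience: CLIENT_ID, // must match the client ID the token was issued to
  });
  const payload = ticket.getPayload(); // contains sub (the stable user id), email, etc.
  return payload;
}

Once the token is verified server-side, your server can establish its own session for the user, which is what lets them stay signed in across page refreshes.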
The only exception I can find to the rule of having a server-side component is the browser's native Credential Manager API:
https://developers.google.com/identity/gsi/web/guides/display-browsers-native-credential-manager
It seems to simplify things significantly, but from what I can tell it supports Chrome only (possibly including Chromium-based browsers such as Edge and Brave, but maybe not plain Chromium, since it seems to require Google accounts in the browser itself, i.e. login is not managed by your code for your website but by the user in the browser directly, before they visit your site).

Web worker Integration

I want to use a web worker to handle my zip code checker function. I haven't worked with web workers before, so the concept is new to me.
This is my zip code function:
function checkZipCode() {
  event.preventDefault();
  if (document.getElementById('zipcode').value < 20000) {
    document.getElementById('zip-result').innerHTML = 'Sorry, we haven’t expanded to that area yet';
  } else if (document.getElementById('zipcode').value >= 20000) {
    document.getElementById('zip-result').innerHTML = 'We’ve got your area covered!';
  } else {
    return null;
  }
};
As per the docs workers are pretty easy to spin up:
// in a JS file
const myWorker = new Worker('./myWorker.js'); // worker requested and top-level scope code executed
myWorker.postMessage('hello');

myWorker.addEventListener('message', e => {
  // e.data will hold the data sent from the worker
  const message = e.data;
  console.log(message); // HELLO
  // if it's just a one-time thing, you can kill the worker
  myWorker.terminate();
});

myWorker.addEventListener('error', e => { // the worker might throw an error
  e.preventDefault();
  console.log(e.message, `on line ${e.lineno}`);
});

// myWorker.js
// run whatever you need, just no DOM stuff, no window, etc.
console.log('this line runs when the worker loads');
addEventListener('message', (e) => {
  postMessage(e.data.toUpperCase()); // upper-case the message and send it right back
});
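Applied to your zip code checker, a minimal sketch could look like the following; the DOM work stays on the page and only the numeric decision moves into the worker (zipWorker.js is an assumed file name, and whether such a tiny check is worth a worker at all is a separate question):

// main page
const zipWorker = new Worker('./zipWorker.js');

function checkZipCode(event) {
  event.preventDefault();
  zipWorker.postMessage(Number(document.getElementById('zipcode').value));
}

zipWorker.addEventListener('message', e => {
  // the worker sends back a boolean; the DOM update stays on the main thread
  document.getElementById('zip-result').innerHTML = e.data
    ? 'We’ve got your area covered!'
    : 'Sorry, we haven’t expanded to that area yet';
});

// zipWorker.js
addEventListener('message', (e) => {
  postMessage(e.data >= 20000); // covered if the zip code is 20000 or above
});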

Handle different HTTP-like protocol myself on Node's http server

I am running a Node.js server with Express. I'd also like the server to accept an IceCast audio stream.
I could use another port, sure, but not all hostings (like Heroku) allow that. IceCast's stream request looks like this:
SOURCE /mountpoint ICE/1.0\n
content-type: audio/mpeg\n
Authorization: Basic USER+PASS base64encoded\n
ice-name: This is my server name\n
ice-url: http://www.oddsock.org\n
ice-genre: Rock\n
ice-bitrate: 128\n
ice-private: 0\n
ice-public: 1\n
ice-description: This is my server description\n
ice-audio-info: ice-samplerate=44100;ice-bitrate=128;ice-channels=2\n
\n
After that, the audio stream follows. I wrote a separate server that handles this on another port, and it works fine:
const net = require('net');
const fs = require('fs');

const CR_NUMBER = 13; // ASCII code for '\r'; CR bytes are skipped while collecting headers

var headers = "";
var headersEnd = false;
var mp3;

const audioServer = net.createServer(function (socket) {
  if (mp3) {
    socket.write("HTTP/1.0 403 Client already connected\r\n\r\n");
    socket.end();
    socket.on("error", (e) => {});
    return;
  }
  mp3 = fs.createWriteStream("test.mp3", { encoding: null, flags: "a" });
  socket.on("data", (data) => {
    if (!headersEnd) {
      var tmp = "";
      for (let i = 0, l = data.byteLength; i < l; ++i) {
        const item = data[i];
        if (item == CR_NUMBER)
          continue;
        const character = String.fromCharCode(item);
        tmp += character;
        headers += character;
        if (headers.endsWith("\n\n")) {
          headersEnd = true;
          console.log("ICE CAST HEADERS: \n", headers.replace(/\n/g, "\\n\n").replace(/\r/g, "\\r"));
          break;
        }
      }
    } else {
      mp3.write(data);
    }
  });
  socket.on("close", () => {
    console.log("ICE CAST: END");
    if (mp3) {
      mp3.close();
      mp3 = null;
    }
  });
  socket.on("error", (e) => {
    console.log("ICE CAST: ERROR " + e.message);
    socket.end();
  });
});

audioServer.listen(11666);
What I'd like is to somehow bootstrap Node's HTTP server so that I can stream over the same port.
I tried to access the req connection info, but that doesn't really work, because the server does not even let the SOURCE /mountpoint ICE/1.0 request through.
const server = http.createServer(function (req, res) {
  // does not happen, the server closes the connection from IceCast
  if (handleAudioStream(req, res)) {
    return;
  } else {
    return expressApp(req, res);
  }
});
So I'd need to go deeper. I tried to inspect the net and http code, but didn't find anything useful.
How can I do this? I really need to use the same port, and since IceCast DOES send HTTP-like headers, it should be possible.
This isn't trivial, but it is possible. You can do some duck punching/monkey patching. See this answer: https://stackoverflow.com/a/24298059/362536
Also, it may be possible to get official support some day, but we're a ways off from that. The first blocker was the non-standard SOURCE method. I sponsored a bounty on that, and Ben Noordhuis was kind enough to implement it last week: https://github.com/nodejs/http-parser/issues/405 It should land in Node.js eventually.
The next issue is the ICE/1.0 protocol version. I've opened an issue for that here: https://github.com/nodejs/http-parser/issues/410 There hasn't been any objection to adding it to the parser yet, but if you want to add a pull request, that might improve the chances of approval.
You'll find other compatibility issues as well as you continue down this road, but all the ones I've hit I've been able to overcome with various solutions. The trick is maintaining strict compatibility with Node.js core as it is updated.
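In the meantime, one workaround (not from the linked answer, just a common demultiplexing pattern, untested here) is to put a raw TCP server on the shared port, peek at the first bytes of each connection, and hand anything that isn't an IceCast SOURCE request over to Node's HTTP server. handleIceCastSource below is a placeholder for your existing socket handling, expressApp is the Express app from your code, and the port is illustrative:

const net = require('net');
const http = require('http');

const httpServer = http.createServer(expressApp); // note: never call .listen() on this one

const demux = net.createServer(socket => {
  socket.once('data', firstChunk => {
    socket.pause();
    if (firstChunk.toString('latin1').startsWith('SOURCE ')) {
      handleIceCastSource(socket, firstChunk);  // your IceCast handling, as in the standalone server
    } else {
      socket.unshift(firstChunk);               // put the bytes back for the HTTP parser
      httpServer.emit('connection', socket);    // let the normal HTTP/Express stack take over
    }
    socket.resume();
  });
});

demux.listen(process.env.PORT || 3000); // the single shared port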

WebRTC Failed to Add a Third Peer: "Cannot set remote answer in state stable"

I am writing a multi-peer WebRTC video chat.
Two peers have no trouble connecting: no errors or warnings in the console, and video works well. But I cannot successfully add a third party to the chat.
On the host (the first participant, Firefox), the error appears as "Cannot set remote answer in state stable" when trying to create an answer. At the second participant (Chrome), the error is "Failed to set remote answer sdp: Called in wrong state: STATE_INPROGRESS". At the third peer, the error is "Failed to set remote answer sdp: Called in wrong state: STATE_RECEIVEDINITIATE".
As it turns out, the first peer fails to get video from the third peer; the other two links appear fine.
Generally, my communication model is as below; self_id is a unique id for each peer in the session, and locate_peer_connection() returns the local peer_connection entry for the particular peer from which we received a message:
a new client sends "peer_arrival" to the session via the signalling server
all peers already in the session setLocalDescription, create an offer and send it to the new client
the new client creates an answer for each of the other peers and calls setRemoteDescription
the new client has video coming up
Signalling is done using WebSocket on a Node.js server.
I have some of the core code below; a few more notes:
self_id is a unique id per client in a session
peer_connection stores the peer connections to the other nodes, and peer_id stores the respective user_id for each of these objects
local_stream is the local video stream from getUserMedia (browser differences already handled)
Any insight into the issue? Is there something wrong with my model?
// locate a peer connection according to its id
function locate_peer_connection(id) {
  var index = peer_id.indexOf(id);
  // not seen before
  if (index == -1) {
    add_peer_connection();
    peer_id.push(id);
    index = peer_id.length - 1;
  }
  return index;
}
// add a peer connection
function add_peer_connection() {
  console.log('add peer connection');
  // add another peer connection for use
  peer_connection.push(new rtc_peer_connection({ "iceServers": [{ "url": "stun:" + stun_server }] }));
  // generic handler that sends any ice candidate to the other peer
  peer_connection[peer_connection.length - 1].onicecandidate = function (ice_event) {
    if (ice_event.candidate) {
      signaling_server.send(
        JSON.stringify({
          type: "new_ice_candidate",
          candidate: ice_event.candidate,
          id: self_id,
          token: call_token
        })
      );
      console.log('send new ice candidate, from ' + self_id);
    }
  };
  // display remote video streams when they arrive using local <video> MediaElement
  peer_connection[peer_connection.length - 1].onaddstream = function (event) {
    video_src.push(event.stream); // store this src
    video_src_id.push(peer_connection.length - 1);
    if (video_src.length == 1) { // first peer
      connect_stream_to_src(event.stream, document.getElementById("remote_video"));
      // video rotating function
      setInterval(function() {
        // rotating video src
        var video_now = video_rotate;
        if (video_rotate == video_src.length - 1) {
          video_rotate = 0;
        } else {
          video_rotate++;
        }
        var status = peer_connection[video_src_id[video_rotate]].iceConnectionState;
        if (status == "disconnected" || status == "closed") { // connection lost, do not show video
          console.log('connection ' + video_rotate + ' liveness check failed');
        } else if (video_now != video_rotate) {
          connect_stream_to_src(video_src[video_rotate], document.getElementById("remote_video"));
        }
      }, 8000);
      // hide placeholder and show remote video
      console.log('first remote video');
      document.getElementById("loading_state").style.display = "none";
      document.getElementById("open_call_state").style.display = "block";
    }
    console.log('remote video');
  };
  peer_connection[peer_connection.length - 1].addStream(local_stream);
}
// handle new peer
function new_peer(signal) {
  // locate peer connection
  var id = locate_peer_connection(signal.id);
  console.log('new peer ' + id);
  // create offer
  peer_connection[id].createOffer(function(sdp) {
    peer_connection[id].setLocalDescription(sdp,
      function() { // call back
        console.log('set local, send offer, connection ' + id);
        signaling_server.send(
          JSON.stringify({
            token: call_token,
            id: self_id,
            type: "new_offer",
            sdp: sdp
          })
        );
      }, log_error);
  }, log_error);
}
// handle offer
function new_offer_handler(signal) {
  var id = locate_peer_connection(signal.id);
  console.log('new offer ' + id);
  // set remote description
  peer_connection[id].setRemoteDescription(
    new rtc_session_description(signal.sdp),
    function() { // call back
      peer_connection[id].createAnswer(function(sdp) {
        peer_connection[id].setLocalDescription(sdp, function () {
          console.log('set local, send answer, connection ' + id);
          signaling_server.send(
            JSON.stringify({
              token: call_token,
              id: self_id,
              type: "new_answer",
              sdp: sdp
            })
          );
        },
        log_error);
      }, log_error);
    }, log_error);
}
// handle answer
function new_answer_handler(signal) {
  var id = locate_peer_connection(signal.id);
  console.log('new answer ' + id);
  peer_connection[id].setRemoteDescription(new rtc_session_description(signal.sdp),
    function() {
      console.log('receive offer answer, set remote, connection ' + id);
    },
    log_error);
}
// handle ice candidate
function ice_candidate_handler(signal) {
  var id = locate_peer_connection(signal.id);
  console.log('get new_ice_candidate from ' + id);
  if (typeof(RTCIceCandidate) != "undefined") {
    peer_connection[id].addIceCandidate(
      new RTCIceCandidate(signal.candidate)
    );
  } else { // firefox
    peer_connection[id].addIceCandidate(
      new mozRTCIceCandidate(signal.candidate)
    );
  }
}
function event_handler(event) {
  var signal = JSON.parse(event.data);
  if (signal.type === "peer_arrival") {
    new_peer(signal);
  } else if (signal.type === "new_ice_candidate") {
    ice_candidate_handler(signal);
  } else if (signal.type === "new_offer") { // get peer description offer
    new_offer_handler(signal);
  } else if (signal.type === "new_answer") { // get peer description answer
    new_answer_handler(signal);
  } else if (signal.type === "new_chat_message") { // chat message and file sharing info
    add_chat_message(signal);
  } else if (signal.type === "new_file_thumbnail_part") { // thumbnail
    store_file_part(signal.name, "thumbnail", signal.id, signal.part, signal.length, signal.data);
    if (file_store[signal.id].thumbnail.parts.length == signal.length) {
      document.getElementById("file_list").innerHTML = get_file_div(signal.id, signal.name) + document.getElementById("file_list").innerHTML;
      document.getElementById("file-img-" + signal.id).src = file_store[signal.id].thumbnail.parts.join("");
    }
  } else if (signal.type === "new_file_part") { // file
    console.log('get new_file_part ' + signal.id);
    store_file_part(signal.name, "file", signal.id, signal.part, signal.length, signal.data);
    update_file_progress(signal.name, signal.id, file_store[signal.id].file.parts.length, signal.length);
  }
}
// generic error handler
function log_error(error) {
  console.log(error);
}
I might be wrong, but these two must be your major problems:
signaling_server.send(...): I am not seeing any target here, so I'm guessing the server just broadcasts this message to everyone. When you send an SDP to an already-established peer connection, you are bound to get the errors you are getting now. My suggestion would be to add a target id to the message; either the server forwards it to that particular peer only, or the server keeps broadcasting but each peer's event_handler checks whether the target id of the message matches its own id and ignores the message if not.
In the onicecandidate event you are broadcasting the ICE candidates to all remote peers, but again this is meant for a single peer. Another issue: calling addIceCandidate on a PeerConnection before setting its local and remote descriptions will throw an error, so you need some mechanism to handle this (add ICE candidates only after the connection's descriptions have been set).
Finally, a suggestion: I am guessing peer_connection is an Array; if you change it to an Object, you can remove the redundancy of locate_peer_connection.
You can do something like:
if (peer_connection[signal.id]) {
  // do something...
} else {
  peer_connection[signal.id] = new PeerConnection(...
}
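Putting both suggestions together, a rough, untested sketch of the receiving side could look like this; it assumes the sender adds a target field to each signalling message, and the identifiers (rtc_peer_connection, stun_server, self_id, local_stream) are the ones from the question:

// keyed by peer id instead of by array index
var peer_connection = {};

function event_handler(event) {
  var signal = JSON.parse(event.data);

  // ignore anything that is addressed to a different peer
  if (signal.target && signal.target !== self_id) {
    return;
  }

  // create a connection only for a peer we have not seen before
  if (!peer_connection[signal.id]) {
    peer_connection[signal.id] = new rtc_peer_connection({ "iceServers": [{ "url": "stun:" + stun_server }] });
    // ...attach onicecandidate / onaddstream here and call addStream(local_stream)...
  }

  // ...then dispatch on signal.type exactly as before...
}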
I had the same problem when I was implementing a one-to-many RTC broadcast, and what mido22 said is right: you might be sending to, or resetting, an existing established peer object when another client comes in. You have to create a new RTCPeerConnection object for every new incoming client. The general workflow would be as follows:
peerconnection = []; // empty array
Then you initialize your media devices with getUserMedia and store the media stream in a global variable so that it can be added when creating an offer.
Once this is done you inform your signalling server with a unique id.
Your signalling server may then broadcast this to all clients except the one it was received from. Each client then checks whether that unique id already exists in its peerconnection array, and like mido22 said you can do this as:
if (peerconnection[signal.id]) {
  // do something with the existing peer connection
} else {
  peerconnection[signal.id] = new RTCPeerConnection({ iceServers: [{ urls: "stun:your.stun.server" }] });
  // register the peerconnection[signal.id].onicecandidate callback
  // register the peerconnection[signal.id].onaddstream callback
  // create an offer
  // if the local stream is ready, then:
  peerconnection[signal.id].addStream(localStream);
  // ...and the rest of the stuff goes as it does in a one-to-one call:
  // setLocalDescription / setRemoteDescription
}

CometD taking more time in pushing messages

I am trying to implement CometD in our application, but it is taking more time than the existing implementation in our project. The existing system pushes messages in milliseconds, whereas CometD is taking about 2 seconds to push a message.
I am not sure where I am going wrong. Any guidance will help me a lot.
My code:
JavaScript on the client side:
(function($)
{
  var cometd = $.cometd;

  $(document).ready(function()
  {
    function _connectionEstablished()
    {
      $('#body').append('<div>CometD Connection Established</div>');
    }

    function _connectionBroken()
    {
      $('#body').append('<div>CometD Connection Broken</div>');
    }

    function _connectionClosed()
    {
      $('#body').append('<div>CometD Connection Closed</div>');
    }

    // Function that manages the connection status with the Bayeux server
    var _connected = false;
    function _metaConnect(message)
    {
      if (cometd.isDisconnected())
      {
        _connected = false;
        _connectionClosed();
        return;
      }
      var wasConnected = _connected;
      _connected = message.successful === true;
      if (!wasConnected && _connected)
      {
        _connectionEstablished();
      }
      else if (wasConnected && !_connected)
      {
        _connectionBroken();
      }
    }

    // Function invoked when first contacting the server and
    // when the server has lost the state of this client
    function _metaHandshake(handshake)
    {
      if (handshake.successful === true)
      {
        cometd.batch(function()
        {
          cometd.subscribe('/java/test', function(message)
          {
            $('#body').append('<div>Server Says: ' + message.data.eventID + ':' + message.data.updatedDate + '</div>');
          });
        });
      }
    }

    // Disconnect when the page unloads
    $(window).unload(function()
    {
      cometd.disconnect(true);
    });

    var cometURL = "http://localhost:8080/cometd2/cometd";
    cometd.configure({
      url: cometURL,
      logLevel: 'debug'
    });

    cometd.addListener('/meta/handshake', _metaHandshake);
    cometd.addListener('/meta/connect', _metaConnect);

    cometd.handshake();
  });
})(jQuery);
CometD service class:
@Listener("/service/java/*")
public void processMsgFromJava(ServerSession remote, ServerMessage.Mutable message)
{
    Map<String, Object> input = message.getDataAsMap();
    String eventId = (String) input.get("eventID");
    // setting msg id
    String channelName = "/java/test";

    // Initialize the channel, making it persistent and lazy
    bayeux.createIfAbsent(channelName, new ConfigurableServerChannel.Initializer()
    {
        public void configureChannel(ConfigurableServerChannel channel)
        {
            channel.setPersistent(true);
            channel.setLazy(true);
        }
    });

    // Publish to all subscribers
    ServerChannel channel = bayeux.getChannel(channelName);
    channel.publish(serverSession, input, null);
}
Is there anything I need to change in the server-side code?
You have made your channel lazy, so a delay in message broadcasting is expected (that is what lazy channels are all about).
Please have a look at the documentation for lazy channels.
If you want immediate broadcasting don't set the channel as lazy.
