How to update a class property in an event handler function? - javascript

Please help me understand how to access a class property inside an event handler function.
I want to update a class property from a handler function of the Web Speech API, but it always fails; I do not know how to pass the class object into the event handler function.
The error is "Cannot read property 'resultIndex' of undefined" in the following code:
class SpeechToText {
  constructor() {
    this.recognition = null;
    this.text = 'original';
    this.getDataFromRecognition = this.getDataFromRecognition.bind(this);
  }
  getDataFromRecognition(event) {
    console.log(this.text);
    var final_transcript = '';
    var interim_transcript = '';
    for (var i = event.resultIndex; i < event.results.length; ++i) {
      if (event.results[i].isFinal) {
        final_transcript += event.results[i][0].transcript;
        console.log('result is Final' + final_transcript);
      } else {
        interim_transcript += event.results[i][0].transcript;
        console.log('interim transcript' + interim_transcript);
      }
    }
    this.text = final_transcript;
  }
  listening() {
    let recognition = new window.webkitSpeechRecognition();
    recognition.lang = 'zh-CN';
    recognition.interimResults = false;
    recognition.continuous = false;
    recognition.start();
    recognition.onresult = this.getDataFromRecognition(event);
    recognition.onspeechend = function() {
      this.stop();
    }
    recognition.onnomatch = function(event) {
      console.log("It is no match.");
    }
    recognition.onerror = function(event) {
      console.log('Error occurred in recognition: ' + event.error);
    }
  }
}
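A note on the error above: the line "recognition.onresult = this.getDataFromRecognition(event);" calls the handler immediately, with whatever "event" happens to be in scope (usually undefined), instead of assigning the function, which is most likely why event.resultIndex blows up. A minimal sketch of the listening() method assigning the handler without invoking it, relying on the bind(this) already done in the constructor (an illustration, not the author's posted fix):

listening() {
  let recognition = new window.webkitSpeechRecognition();
  recognition.lang = 'zh-CN';
  recognition.interimResults = false;
  recognition.continuous = false;
  // Assign the function itself; the browser calls it later with a real event.
  // getDataFromRecognition was bound to the instance in the constructor,
  // so this.text inside it refers to the SpeechToText object.
  recognition.onresult = this.getDataFromRecognition;
  recognition.start();
}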
In the second version below, this.text inside "this.recognition.onresult = function(event) {...}" creates a new property on the SpeechRecognition object instead of updating the class property.
class SpeechToText {
  constructor() {
    this.text = 'original';
    this.recognition = null;
  }
  setText(str) {
    this.text = str;
    return this.text;
    console.log('this in on setText ' + this);
  }
  getText() {
    return this.text;
  }
  listening() {
    this.recognition = new window.webkitSpeechRecognition();
    this.recognition.lang = 'zh-CN';
    //this.recognition.lang = 'en-GB';
    this.recognition.interimResults = false;
    this.recognition.continuous = false;
    this.recognition.start();
    console.log("this under class" + this);
    this.recognition.onresult = function(event) {
      // The SpeechRecognitionEvent results property returns a SpeechRecognitionResultList object
      // The SpeechRecognitionResultList object contains SpeechRecognitionResult objects.
      // It has a getter so it can be accessed like an array
      // The [last] returns the SpeechRecognitionResult at the last position.
      // Each SpeechRecognitionResult object contains SpeechRecognitionAlternative objects that contain individual results.
      // These also have getters so they can be accessed like arrays.
      // The [0] returns the SpeechRecognitionAlternative at position 0.
      // We then return the transcript property of the SpeechRecognitionAlternative object
      this.recognition = new window.webkitSpeechRecognition();
      var final_transcript = '';
      var interim_transcript = '';
      for (var i = event.resultIndex; i < event.results.length; ++i) {
        if (event.results[i].isFinal) {
          final_transcript += event.results[i][0].transcript;
          console.log('result is Final' + final_transcript);
        } else {
          interim_transcript += event.results[i][0].transcript;
          console.log('interim transcript' + interim_transcript);
        }
      }
      //_utteranceForEdgeTrigger = final_transcript;
      //final_transcript = capitalize(final_transcript);
      console.log('final_transcript' + final_transcript);
      //this.setText(final_transcript);
      this.text = final_transcript;
      console.log('the text is in listening function :' + this.text);
      console.log('the this in on result is ' + this);
      //window.SpeechToText.getText();
    }
    this.recognition.onspeechend = function() {
      console.log('this in function onspeechend' + this);
      this.stop();
      console.log('speech end')
    }
    this.recognition.onnomatch = function(event) {
      console.log("I didn't recognise that color.");
    }
    this.recognition.onerror = function(event) {
      console.log('Error occurred in recognition: ' + event.error);
    }
  }
}

var speech = new SpeechToText();
//speech.getbx();
speech.listening();
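Here the underlying problem is that a plain function assigned to onresult is called with "this" set to the recognition object rather than the class instance, which is why this.text ends up on the wrong object. A minimal sketch of one common fix, using an arrow function so "this" stays the SpeechToText instance (an illustration, not the author's code):

listening() {
  this.recognition = new window.webkitSpeechRecognition();
  this.recognition.lang = 'zh-CN';
  this.recognition.interimResults = false;
  this.recognition.continuous = false;

  // Arrow functions do not rebind "this", so it still refers to the
  // SpeechToText instance and this.text updates the class property.
  this.recognition.onresult = (event) => {
    var final_transcript = '';
    for (var i = event.resultIndex; i < event.results.length; ++i) {
      if (event.results[i].isFinal) {
        final_transcript += event.results[i][0].transcript;
      }
    }
    this.text = final_transcript;
  };

  this.recognition.start();
}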

Related

Google Speech Recognizer - define 2 optional languages

I developed a web application based on JavaScript, with the Google Speech Recognizer API.
The main language should be Hebrew, and it is supposed to help doctors write medical diagnoses.
The problem is that if I say a medical word in English, like C.T. or Diabetes, which in professional language should be said in English, it writes the English word in Hebrew.
My question is: is there any option to define multiple languages, or to set language priorities, so that when it detects an unfamiliar word it tries the second choice?
Please help me. Thank you!
This is my JavaScript code:
var langs =
[['Afrikaans', ['af-ZA']],
['Hebrew', ['he-IL']],
['Bahasa Melayu', ['ms-MY']],
['Català', ['ca-ES']],
['Čeština', ['cs-CZ']],
['Deutsch', ['de-DE']],
['English', ['en-AU', 'Australia'],
['en-CA', 'Canada'],
['en-IN', 'India'],
['en-NZ', 'New Zealand'],
['en-ZA', 'South Africa'],
['en-GB', 'United Kingdom'],
['en-US', 'United States']],
['Español', ['es-AR', 'Argentina'],
['es-BO', 'Bolivia'],
['es-CL', 'Chile'],
['es-CO', 'Colombia'],
['es-CR', 'Costa Rica'],
['es-EC', 'Ecuador'],
['es-SV', 'El Salvador'],
['es-ES', 'España'],
['es-US', 'Estados Unidos'],
['es-GT', 'Guatemala'],
['es-HN', 'Honduras'],
['es-MX', 'México'],
['es-NI', 'Nicaragua'],
['es-PA', 'Panamá'],
['es-PY', 'Paraguay'],
['es-PE', 'Perú'],
['es-PR', 'Puerto Rico'],
['es-DO', 'República Dominicana'],
['es-UY', 'Uruguay'],
['es-VE', 'Venezuela']],
['Euskara', ['eu-ES']],
['Français', ['fr-FR']],
['Galego', ['gl-ES']],
['Hrvatski', ['hr_HR']],
['IsiZulu', ['zu-ZA']],
['Íslenska', ['is-IS']],
['Italiano', ['it-IT', 'Italia'],
['it-CH', 'Svizzera']],
['Magyar', ['hu-HU']],
['Nederlands', ['nl-NL']],
['Norsk bokmål', ['nb-NO']],
['Polski', ['pl-PL']],
['Português', ['pt-BR', 'Brasil'],
['pt-PT', 'Portugal']],
['Română', ['ro-RO']],
['Slovenčina', ['sk-SK']],
['Suomi', ['fi-FI']],
['Svenska', ['sv-SE']],
['Türkçe', ['tr-TR']],
['български', ['bg-BG']],
['Pусский', ['ru-RU']],
['Српски', ['sr-RS']],
['한국어', ['ko-KR']],
['中文', ['cmn-Hans-CN', '普通话 (中国大陆)'],
['cmn-Hans-HK', '普通话 (香港)'],
['cmn-Hant-TW', '中文 (台灣)'],
['yue-Hant-HK', '粵語 (香港)']],
['日本語', ['ja-JP']],
['Lingua latīna', ['la']]];
for (var i = 0; i < langs.length; i++) {
select_language.options[i] = new Option(langs[i][0], i);
}
select_language.selectedIndex = 1;
updateCountry();
select_dialect.selectedIndex = 1;
showInfo('info_start');
function updateCountry() {
for (var i = select_dialect.options.length - 1; i >= 0; i--) {
select_dialect.remove(i);
}
var list = langs[select_language.selectedIndex];
for (var i = 1; i < list.length; i++) {
select_dialect.options.add(new Option(list[i][1], list[i][0]));
}
select_dialect.style.visibility = list[1].length == 1 ? 'hidden' : 'visible';
}
var create_email = false;
var final_transcript = '';
var recognizing = false;
var ignore_onend;
var start_timestamp;
if (!('webkitSpeechRecognition' in window)) {
console.log("before upgrade")
upgrade();
} else {
start_button.style.display = 'inline-block';
var recognition = new webkitSpeechRecognition();
recognition.continuous = true;
recognition.interimResults = true;
recognition.onstart = function() {
recognizing = true;
showInfo('info_speak_now');
console.log("webkitSpeechRecognition")
start_img.src = '/static/Mic_MicroPhone_Mute.PNG';
start_pic=document.getElementById("start_img")
start_pic.style.height="59px";
start_pic.style.width="43px";
};
recognition.onerror = function(event) {
if (event.error == 'no-speech') {
console.log("event error");
start_img.src = '/static/mic.png';
showInfo('info_no_speech');
ignore_onend = true;
}
if (event.error == 'audio-capture') {
console.log("event caputre");
start_img.src = '/static/mic.png';
showInfo('info_no_microphone');
ignore_onend = true;
}
if (event.error == 'not-allowed') {
console.log("even not allowed");
if (event.timeStamp - start_timestamp < 100) {
showInfo('info_blocked');
} else {
showInfo('info_denied');
}
ignore_onend = true;
}
};
recognition.onend = function() {
recognizing = false;
if (ignore_onend) {
return;
}
console.log("init mic image");
start_img.src = '/static/mic.png';
if (!final_transcript) {
showInfo('info_start');
return;
}
showInfo('');
// if (window.getSelection) {
// window.getSelection().removeAllRanges();
// var range = document.createRange();
// range.selectNode(document.getElementById('final_span'));
// window.getSelection().addRange(range);
// }
if (create_email) {
create_email = false;
createEmail();
}
};
recognition.onresult = function(event) {
var interim_transcript = '';
for (var i = event.resultIndex; i < event.results.length; ++i) {
if (event.results[i].isFinal) {
console.log("Final!!!!!!!!!!!!")
console.log(final_span.value+event.results[i][0].transcript);
final_transcript = final_span.value+event.results[i][0].transcript;
final_span.value=final_transcript;
} else {
interim_transcript += event.results[i][0].transcript;
}
}
final_transcript = capitalize(final_transcript);
console.log("interim_transcript :");
console.log(interim_transcript);
console.log("final_transcript :");
console.log(final_transcript);
// final_span.value = linebreak(interim_transcript);
if (final_transcript) {
console.log("final transcript true");
// final_span.value= linebreak(final_transcript);
}
if (final_transcript || interim_transcript) {
showButtons('inline-block');
}
};
}
function upgrade() {
start_button.style.visibility = 'hidden';
showInfo('info_upgrade');
}
var two_line = /\n\n/g;
var one_line = /\n/g;
function linebreak(s) {
return s.replace(two_line, '<p></p>').replace(one_line, '<br>');
}
var first_char = /\S/;
function capitalize(s) {
return s.replace(first_char, function(m) { return m.toUpperCase(); });
}
function createEmail() {
var n = final_transcript.indexOf('\n');
if (n < 0 || n >= 80) {
n = 40 + final_transcript.substring(40).indexOf(' ');
}
var subject = encodeURI(final_transcript.substring(0, n));
var body = encodeURI(final_transcript.substring(n + 1));
textarea_value=document.getElementById("final_span").value;
window.location.href = 'mailto:?subject=' + "Puzzle-Soft Telemedicine - Speech Recognizer " + '&body=' + textarea_value;
}
function copyButton() {
if (recognizing) {
recognizing = false;
recognition.stop();
}
copy_button.style.display = 'none';
copy_info.style.display = 'inline-block';
showInfo('');
}
function emailButton() {
if (recognizing) {
create_email = true;
recognizing = false;
recognition.stop();
} else {
createEmail();
}
email_button.style.display = 'none';
email_info.style.display = 'inline-block';
showInfo('');
}
function startButton(event) {
if (recognizing) {
recognition.stop();
console.log("StartButton if")
return;
}
console.log("StartButton else")
final_transcript = '';
recognition.lang = select_dialect.value;
recognition.start();
ignore_onend = false;
// final_span.innerHTML = '';
// interim_span.innerHTML = '';
start_img.src = '/static/mic.png';
showInfo('info_allow');
showButtons('none');
start_timestamp = event.timeStamp;
}
function showInfo(s) {
if (s) {
for (var child = info.firstChild; child; child = child.nextSibling) {
if (child.style) {
child.style.display = child.id == s ? 'inline' : 'none';
}
}
info.style.visibility = 'visible';
} else {
info.style.visibility = 'hidden';
}
}
var current_style;
function showButtons(style) {
if (style == current_style) {
return;
}
current_style = style;
copy_button.style.display = style;
email_button.style.display = style;
copy_info.style.display = 'none';
email_info.style.display = 'none';
}
function ClearText() {
console.log("clear");
text_area_value=document.getElementById("final_span");
console.log(text_area_value);
console.log(text_area_value.value);
text_area_value.value='';
}
//var givevalue = function (my_key) {
// return dict[my_key];
//
// }
//
//js_json={'אדרנלypr': 'Olanzapine Teva', 'זיפאדהרה': 'Zypadhera', 'אומפראזול': 'Omeprazole', 'אומפרדקס': 'Omepradex', 'לוסק': 'Losec', 'אומפריקס': 'Omeprix', 'אומפרדקס Z': 'Omepradex Z', 'אומפרה': 'Omepra', 'אונדאנסטרון': 'Ondansetron', 'זופרן': 'Zofran', 'אודנטרון': 'Odnatron', 'אונדאנסטרון - פרזניוס': 'Ondansetron - Fresenius', 'אונדנסטרון אינובמד': 'Ondansetron Inovamed'}
//function replaceTextAreaDict() {
//textarea_result=document.getElementById("final_span");
// textarea_words=textarea_result.split(' ') ;
// for (var i = 0; i < textarea_result.length; i++) {
//
// }
//
//
//
// givevalue()
//
//}
//
//replaceTextAreaDict();
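For reference, a minimal sketch of what the commented-out replaceTextAreaDict idea above could look like, assuming js_json maps recognized Hebrew transliterations to the intended English terms (a hypothetical completion for illustration only, not tested against the real data):

function replaceTextAreaDict() {
  var textarea = document.getElementById("final_span");
  var words = textarea.value.split(' ');
  for (var i = 0; i < words.length; i++) {
    // If the recognized Hebrew word is a known term, swap in the English form.
    if (js_json[words[i]] !== undefined) {
      words[i] = js_json[words[i]];
    }
  }
  textarea.value = words.join(' ');
}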
You cannot mix languages.
Speech recognition roughly contains 3 parts: an acoustic model, a language model, and a dictionary.
The acoustic model is the result of training on data and contains the relationship between the audio signal and phonetics.
The dictionary contains words and how they are pronounced; for example, the word TOP is pronounced "T AH P" in the general speech recognition dictionary.
The language model is the connection between words that builds sentences; for example, the word "I" is connected with "am", so the speech recognizer will very rarely (or never) give the result "I are" or "I is".
Every language has its own acoustic model (phonetics), dictionary (words), and language model (sentences), so we cannot simply mix them up.
The question is: is it still possible?
The answer is: YES!
You can build your own language (in this case, a medical language) using many tools; one I have already tried is called CMU Sphinx / PocketSphinx. You can build your own model, train it, and make a dictionary out of it. It will be a lot of work, but you can configure anything you will need for speech recognition.
Link for any platform implementation : https://github.com/cmusphinx

How to execute if conditional just once

I am wondering how to do this, because the variable keeps returning to its default value. It's an iBeacon application; I don't know if I need to show more details about my app. I just want to call the function something() once. Can anyone help me?
function uint8ArrToHexStringNoSpace(arr) {
return Array.prototype.map.call(arr, function(n) {
var s = n.toString(16);
if(s.length == 1) {
s = '0'+s;
}
return s;
}).join('');
}
var quit;
function something() {
if(quit) {
window.open("info.html");
}
quit = true;
}
function appendTd(root, value, id) {
var text = document.createTextNode(value);
var td = document.createElement("p");
if(id) {
td.id = id;
}
td.appendChild(text);
root.appendChild(td);
}
function hex16(i) {
var s = i.toString(16);
while(s.length < 4) {
s = '0'+s;
}
return s;
}
var beacons = {};
var app = {
initialize: function() {
// Important to stop scanning when page reloads/closes!
window.addEventListener('beforeunload', function(e)
{
iBeacon.stopScan();
});
this.bindEvents();
},
bindEvents: function() {
document.addEventListener('deviceready', this.onDeviceReady, false);
},
onDeviceReady: function() {
//app.receivedEvent('deviceready');
app.startScan();
},
receivedEvent: function(id) {
var parentElement = document.getElementById(id);
var listeningElement = parentElement.querySelector('.listening');
var receivedElement = parentElement.querySelector('.received');
listeningElement.setAttribute('style', 'display:none;');
receivedElement.setAttribute('style', 'display:block;');
console.log('Received Event: ' + id);
},
startScan: function() {
iBeacon.startScan({}, function(beacon) {
//console.log("beacon found: "+beacon.address+" "+beacon.name+" "+beacon.rssi+"/"+beacon.txPower);
var r = beacon.region;
//console.log("M"+r.major.toString(16)+" m"+r.minor.toString(16)+" uuid "+uint8ArrToHexStringNoSpace(r.uuid));
var key = 'tx'+beacon.address.replace(/:/g,'_');
//console.log('key: '+key);
if(beacons[key] == null) {
beacons[key] = beacon;
var root = document.getElementById("beaconListRoot");
var tr = document.createElement("tr");
var br = document.createElement("br");
// <tr><td>Address</td><td>Name</td><td>RSSI</td><td>ID</td><td>UUID</td></tr>
var adress = ' Adress: ';
var name = ' Name: ';
var distance = ' distance: ';
var major = ' Major: ';
var minor = 'Minor: ';
var UUID = ' UUID: ';
appendTd(tr, adress + beacon.address + name + beacon.name);
appendTd(tr, distance + beacon.rssi+" /"+beacon.txPower+"\u00A0"+beacon.estimatedDistance.toFixed(2)+'m', key);
appendTd(tr, major + hex16(r.major)+"\u00A0"+ minor +hex16(r.minor));
appendTd(tr, UUID + uint8ArrToHexStringNoSpace(r.uuid));
root.appendChild(tr);
} else {
var td = document.getElementById(key);
td.firstChild.nodeValue = 'Power: ' + beacon.rssi+"/"+beacon.txPower+ ' Distance: ' + "\u00A0"+beacon.estimatedDistance.toFixed(2)+'m';
}
if(beacon.address == '78:A5:04:13:3B:17' && beacon.estimatedDistance.toFixed(2) <= 10 ){
something();
}
}, function(error) {
console.log("startScan error: " + error);
});
},
};
Use localStorage: https://developer.mozilla.org/en/docs/Web/API/Window/localStorage
localStorage (and sessionStorage) lets you keep persistent values:
function something() {
  localStorage.setItem("somethingCalled", "yes");
  if (quit) {
    window.open("info.html");
  }
  quit = true;
}
then where you call something():
if (localStorage.getItem("somethingCalled") != "yes") {
  something();
}
You may want to use sessionStorage instead of localStorage (https://developer.mozilla.org/en-US/docs/Web/API/Window/sessionStorage).
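A minimal consolidated sketch of the same idea, with the guard moved inside something() itself so callers don't need to repeat the check (using sessionStorage as suggested above; an illustration, not the answer's exact code):

function something() {
  // Run at most once per browser session: bail out if the flag is already set.
  if (sessionStorage.getItem("somethingCalled") === "yes") {
    return;
  }
  sessionStorage.setItem("somethingCalled", "yes");
  window.open("info.html");
}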

Cannot read property `attachEvent` of null

I have a JavaScript file using speechSynthesis.
I keep getting this error:
"Cannot read property 'attachEvent' of null" at line 129.
Here it is:
speech.bind = function(event, element, callback) {
  if (element.attachEvent) {   // line 129: where the error occurs
    element.attachEvent('on'+event, callback)
  } else if (window.addEventListener) {
    element.addEventListener(event, callback, false);
  };
};
I will put the whole code here so anyone can check the entire JavaScript:
var speech_def = speech_def || {
container: "#glb-materia"
,insert_before: true
,ico: "http://edg-1-1242075393.us-east-1.elb.amazonaws.com/speech/listen_icon.jpg"
,bt_txt: "OUÇA A REPORTAGEM"
,bt_stop: "PARAR!"
,source: ".materia-conteudo"
,ignore: [
".foto-legenda"
,".frase-materia"
,"script"
,"style"
,"#speech"
,".sub-header"
,".chamada-materia"
,".data-autor"
,".box-tags"
,".saibamais"
]
};
var speech = speech || {};
//Polyfill remove()
Element.prototype.remove = function() {
this.parentElement.removeChild(this);
};
NodeList.prototype.remove = HTMLCollection.prototype.remove = function() {
for (var i = 0, len = this.length; i < len; i++) {
if(this[i] && this[i].parentElement) {
this[i].parentElement.removeChild(this[i]);
}
}
};
//Polyfill innerText
if ( (!('innerText' in document.createElement('a'))) && ('getSelection' in window) ) {
HTMLElement.prototype.__defineGetter__("innerText", function() {
var selection = window.getSelection(),
ranges = [],
str;
for (var i = 0; i < selection.rangeCount; i++) {
ranges[i] = selection.getRangeAt(i);
}
selection.removeAllRanges();
selection.selectAllChildren(this);
str = selection.toString();
selection.removeAllRanges();
for (var i = 0; i < ranges.length; i++) {
selection.addRange(ranges[i]);
}
return str;
})
}
speech.iOS = /(iPad|iPhone|iPod)/g.test( navigator.userAgent );
speech.Android = /(Android)/g.test( navigator.userAgent );
speech.include = function() {
var bt = ""
bt += '<div id="speech" '
+'title="'+speech_def.bt_txt+'" '
+'style="'
+'display: none; '
+'margin: 5px; '
+'font-size: 12px; '
+'font-style: italic; '
+'color: #bbbbbb; '
+'cursor: pointer;"'
+'>';
bt += '<img style="width: 25px; height: 25px;" src="'+speech_def.ico+'"> ';
bt += '<i style="vertical-align: top; line-height: 28px;">'+speech_def.bt_txt+'</i>';
bt += '</div>';
var button = document.createElement("SPAN");
button.innerHTML = bt;
var box = document.querySelectorAll(speech_def.container)[0];
if (speech_def.insert_before) {
box.insertBefore(button,box.firstChild);
} else {
box.appendChild(button)
};
};
speech.stop = function() {
window.speechSynthesis.cancel();
}
speech.stop();
speech.content = function() {
var result = "";
var boxes = speech_def.source.split(",");
boxes.reverse();
for (var n = boxes.length - 1; n >= 0; n--) {
var doc = document.querySelector(boxes[n]);
if (doc) {
doc = doc.cloneNode(true);
for (var i = speech_def.ignore.length - 1; i >= 0; i--) {
var els = doc.querySelectorAll(speech_def.ignore[i]);
for (var j = els.length - 1; j >= 0; j--) {
els[j].remove();
};
};
result += "." + doc.innerText;
};
};
return result;
};
speech.start_speech = function() {
var content = speech.content();
// Note: some voices don't support altering params
if (!speech.Android) speech.msg.voice = speech.voices[0];
// msg.voiceURI = 'native';
speech.msg.volume = speech.iOS?1:1; // 0 to 1
speech.msg.rate = speech.iOS?0.6:1; // 0.1 to 10
speech.msg.pitch = speech.iOS?1:1; // 0 to 2
speech.msg.text = content;
speech.msg.lang = 'pt-BR';
speech.msg.onend = function(e) {
// console.log('Finished in ' + event.elapsedTime + ' seconds.');
};
window.speechSynthesis.speak(speech.msg);
};
speech.read = function(){}; //chrome problem
speech.bind = function(event, element, callback) {
if (element.attachEvent) {
element.attachEvent('on'+event, callback )
} else if (window.addEventListener) {
element.addEventListener(event, callback ,false);
};
};
speech.click = function(e){
event.stopPropagation()
if (window.event) window.event.cancelBubble = true;
var control = document.getElementById("speech");
var label;
if (window.speechSynthesis.speaking) {
label = speech_def.bt_txt;
speech.stop();
} else {
label = speech_def.bt_stop;
speech.start_speech();
};
control.querySelector("i").innerHTML = label;
}
speech.bind_button = function() {
var control = document.getElementById("speech");
speech.bind("click",control,speech.click);
};
speech.show_button = function() {
if (!speech.on_page) {
speech.on_page = true;
speech.include();
speech.bind_button();
};
var control = document.getElementById("speech");
control.style.display="inline-block";
};
speech.test_portuguese = function() {
speech.voices = [];
window.speechSynthesis.getVoices().forEach(function(voice) {
if (voice.lang == "pt-BR") {
speech.voices.push(voice);
};
});
if (speech.Android) {
var control = document.getElementById("speech");
var complement = (speech.voices.length > 0)?"*":"";
// control.querySelector("i").innerHTML = "OUÇA A REPORTAGEM"+complement;
return true;
} else {
return (speech.voices.length > 0);
};
};
speech.start = function() {
if ('speechSynthesis' in window) {
speech.msg = new SpeechSynthesisUtterance();
if (speech.test_portuguese()) {
speech.show_button();
} else {
window.speechSynthesis.onvoiceschanged = function() {
if (speech.test_portuguese()) {
speech.show_button();
};
};
};
speech.bind_button();
};
};
speech.start();
speech.bind("load",window,speech.start)
How can I solve this problem?
Thanks so much.
The problem is that element is null at that point.
Probably there is no element with id="speech" yet, so control is null in this code:
speech.bind_button = function() {
  var control = document.getElementById("speech");
  speech.bind("click", control, speech.click);
};

chrome Event is not dispatched

I'm having a little problem. I thought I had understood Event Handling, but now I don't think so anymore.
I've created a Chrome Event():
this.onReadLine = new chrome.Event();
this event is dispatched in a function:
this.onReadLine.dispatch(line);
Before the dispatch instruction I've tried to log 'line', the argument of the dispatch call. No problem: 'line' exists.
Going further down in the code you will find this part:
connection.onReadLine.addListener(function(line) {
logJSON(line);
});
this is what must be fired every time the onReadLine event is dispatched.
The problem is that the Event onReadLine is only dispatched when I push or release the button '#dimmer1_Chrome_Input' defined at the end of my code.
Where am I wrong?
My full code is below. The parts related to the problem are highlighted with the long ////\\\\ divider lines.
// Serial used from Arduino board
const Arduino_COM = 'COM3'; // PC
var SerialConnection = function() {
this.connectionId = -1;
this.lineBuffer = "";
this.boundOnDataReceiving = this.onDataReceiving.bind(this);
this.boundOnDataReceivingError = this.onDataReceivingError.bind(this);
this.onConnect = new chrome.Event();
///////////////////////////\\\\\\\\\\\\\\\/////////////\\\\\\\\\\////////\\\\\\\\\\\\////////\\\\\\\\\\//////PROBLEM
this.onReadLine = new chrome.Event();
///////////////////////////\\\\\\\\\\\\\\\/////////////\\\\\\\\\\////////\\\\\\\\\\\\////////\\\\\\\\\\//////PROBLEM
this.onError = new chrome.Event();
};
SerialConnection.prototype.connect = function(Serial_COM_Port) {
chrome.serial.connect(Serial_COM_Port, this.onConnectComplete.bind(this));
};
SerialConnection.prototype.onConnectComplete = function(connectionInfo) {
if (!connectionInfo) {
log("Connection failed.");
return;
}
this.connectionId = connectionInfo.connectionId;
chrome.serial.onReceive.addListener(this.boundOnDataReceiving);
chrome.serial.onReceiveError.addListener(this.boundOnDataReceivingError);
this.onConnect.dispatch();
};
SerialConnection.prototype.send = function(msg) {
if (this.connectionId < 0) {
throw 'Invalid connection';
}
chrome.serial.send(this.connectionId, String_to_ArrayBuffer(msg), function() {});
};
SerialConnection.prototype.onDataReceiving = function(receiveInfo) {
if (receiveInfo.connectionId !== this.connectionId) {
return;
}
this.lineBuffer += ArrayBuffer_to_String(receiveInfo.data);
var index;
while ((index = this.lineBuffer.indexOf('\n')) >= 0) {
var line = this.lineBuffer.substr(0, index + 1);
console.log(line);
///////////////////////////\\\\\\\\\\\\\\\/////////////\\\\\\\\\\////////\\\\\\\\\\\\////////\\\\\\\\\\//////PROBLEM
this.onReadLine.dispatch(line);
///////////////////////////\\\\\\\\\\\\\\\/////////////\\\\\\\\\\////////\\\\\\\\\\\\////////\\\\\\\\\\//////PROBLEM
this.lineBuffer = this.lineBuffer.substr(index + 1);
}
};
SerialConnection.prototype.onDataReceivingError = function(errorInfo) {
if (errorInfo.connectionId === this.connectionId) {
this.onError.dispatch(errorInfo.error);
}
};
SerialConnection.prototype.disconnect = function() {
if (this.connectionId < 0) {
throw 'Invalid connection';
}
chrome.serial.disconnect(this.connectionId, function() {});
};
var connection = new SerialConnection();
connection.onConnect.addListener(function() {
log('connected to: ' + Arduino_COM);
});
///////////////////////////\\\\\\\\\\\\\\\/////////////\\\\\\\\\\////////\\\\\\\\\\\\////////\\\\\\\\\\//////PROBLEM
connection.onReadLine.addListener(function(line) {
logJSON(line);
});
///////////////////////////\\\\\\\\\\\\\\\/////////////\\\\\\\\\\////////\\\\\\\\\\\\////////\\\\\\\\\\//////PROBLEM
connection.connect(Arduino_COM);
function logJSON(result) {
var response = jQuery.parseJSON( result );
dimmer1_state = response.dimmer1_state;
dimmer1_value = response.dimmer1_value;
SerialIn = response.SerialIn;
dimmer1_Chrome_Input = response.dimmer1_Chrome_Input;
temperature1_value = response.temperature1_value;
s=Math.round(dimmer1_value * 80 / 255 + 20);
hsl='hsl(115,'+s+'%,60%)';
if (dimmer1_state == 0)
{
$('#statusCircle').css('fill','hsl(115,20%,60%)');
}
else
{
$('#statusCircle').css('fill', hsl);
};
// Print led Status to HTML buffer area
messaggio = "dimmer1 state: " + dimmer1_state
+ "<br />dimmer1 value: " + dimmer1_value
+ "<br />SerialIn: " + SerialIn
+ "<br />dimmer1_Chrome_Input: " + dimmer1_Chrome_Input
+ "<br />temperature1_value: " + temperature1_value + " °C";
log(messaggio);
};
function log(msg) {
$('#buffer').html(msg);
};
$(function(){
$('#dimmer1_Chrome_Input') .button()
.mousedown(function() {
connection.send("101");
})
.mouseup(function() {
connection.send("100");
});
});
The code is correct; the error was in another part of the program.
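For reference, the event pattern used in the code above, in isolation; a minimal sketch assuming it runs inside a Chrome App where chrome.Event is available:

// Create the event, register a listener, then dispatch a value to it.
var onReadLine = new chrome.Event();

onReadLine.addListener(function(line) {
  console.log('listener received: ' + line);
});

onReadLine.dispatch('hello\n'); // the listener above fires with 'hello\n'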

How do I implement server-sent-events broadcast in totaljs?

Suppose that I have this scenario:
Client1 and client2 are connected in some kind of session1
Simultaneously, client3 and client4 are connected in session2
Now I want to broadcast event "a" to all clients in session1 only.
I've found this example:
https://github.com/totaljs/examples/tree/master/server-sent-events
But I haven't found yet how I can accomplish my goal.
Any ideas, please?
For this case it is better to use WebSocket. The answer for Server-Sent Events (SSE):
controllers/default.js:
var session1 = [];
var session2 = [];

exports.install = function() {
  F.route('/broadcast/{session}/', sse_broadcast, ['sse']);
};

function sse_broadcast(session) {
  var self = this;
  switch (session) {
    case '1':
      session1.push(self);
      return;
    case '2':
      session2.push(self);
      return;
  }
  self.throw400('This session is not supported.');
}

setInterval(function() {
  if (session1.length === 0)
    return;
  var message = U.GUID(20);
  var remove = -1;
  // broadcast to all clients of session1
  for (var i = 0, length = session1.length; i < length; i++) {
    var session = session1[i];
    if (!session.isConnected) {
      remove = i;
      continue;
    }
    session.sse({ message: message });
  }
  if (remove === -1)
    return;
  session1[remove].destroy();
  session1.splice(remove, 1);
}, 1000);
Client-side:
<div id="message"></div>
<script type="text/javascript">
function init() {
var el = document.getElementById('message');
var obj = new EventSource('/broadcast/1/');
obj.onmessage = function(ev) {
el.innerHTML = ev.data + '<br />' + el.innerHTML;
};
obj.addEventListener('end', function() {
el.innerHTML = '----- end<br />' + el.innerHTML;
}, true);
obj.onerror = function(e) {
el.innerHTML = '----- error<br />' + el.innerHTML;
};
}
setTimeout(init, 500);
</script>
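For reference, a minimal sketch of how the broadcast loop above could be factored into a helper so session2 is served the same way, built only from the calls already used in the answer (an illustration, not part of the original answer):

// Send a payload to every connected controller in a session list and
// drop controllers that have disconnected.
function broadcast(list, data) {
  for (var i = list.length - 1; i >= 0; i--) {
    var client = list[i];
    if (!client.isConnected) {
      client.destroy();
      list.splice(i, 1);
      continue;
    }
    client.sse(data);
  }
}

setInterval(function() {
  var message = U.GUID(20);
  broadcast(session1, { message: message });
  broadcast(session2, { message: message });
}, 1000);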
