I'm using the Jira API.
I'm writing some functions, but before using a function I need to verify whether a value exists or not.
If it exists, I can launch the functions; otherwise, do nothing.
I'm doing this:
// Call the file functions.js
var functions = require('./functions.js')
/*
Function getAllIssueForSCII returns all the issues as JSON and browses all the keys in the SCII project.
Function pushInitialization initializes the computer score card to 80 on Jira.
*/
functions.getAllIssueForSCII().then(function(json){
    for (let i = 0; i < json.issues.length; i++){
        if(json.issues[i].fields.customfield_14038 = null){ // i'm doing this
            console.log(json.issues[i].key);
            functions.pushInitialization(json.issues[i].key);
        }
    }
});
/*
A delay is added so that Jira can correctly recover the value 80.
Thanks to this value, we can do all the calculations (function calculate).
Function pushComputerScoreCard pushes the final value into the computer score card.
Function generateJSON generates a JSON.
Function replaceCharacter solves the problem of arrays inside the JSON.
*/
setTimeout(function() {
    functions.getAllIssueForSCII().then(function(json){
        for (let i = 0; i < json.issues.length; i++){
            functions.calculate(json.issues[i]);
            functions.pushComputerScoreCard(json.issues[i].key);
            functions.generateJSON(json.issues[i].key);
            functions.replaceCharacter();
        }
    });
}, 1000)
My problem: after the setTimeout, it picks up values that already exist and runs the calculation anyway...
I need to verify my condition throughout the whole script.
Thanks for your help.
You are assigning a null value in your if condition:
if(json.issues[i].fields.customfield_14038 = null){ // i'm doing this
You need to compare values:
if(json.issues[i].fields.customfield_14038 === null){ // You need to do this:
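For completeness: a single = assigns, and the whole expression evaluates to the assigned value, so = null makes the condition always falsy and the branch never runs. As for checking the condition across the whole script: one possible approach (just a sketch, assuming you want the second pass to process only the issues that were empty in the first pass) is to remember the keys you initialized:

var initializedKeys = [];

functions.getAllIssueForSCII().then(function(json){
    for (let i = 0; i < json.issues.length; i++){
        if (json.issues[i].fields.customfield_14038 === null) { // === compares, = assigns
            initializedKeys.push(json.issues[i].key);
            functions.pushInitialization(json.issues[i].key);
        }
    }
});

setTimeout(function() {
    functions.getAllIssueForSCII().then(function(json){
        for (let i = 0; i < json.issues.length; i++){
            // only run the calculation for issues initialized in the first pass
            if (initializedKeys.indexOf(json.issues[i].key) !== -1) {
                functions.calculate(json.issues[i]);
                functions.pushComputerScoreCard(json.issues[i].key);
                functions.generateJSON(json.issues[i].key);
                functions.replaceCharacter();
            }
        }
    });
}, 1000);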
I've implemented a call center in Salesforce with the Twilio Client JavaScript SDK. I'm trying to save the call record information in Salesforce, and I'm using connection.parameters.CallSid to identify the correct record when the recording callback fires. However, my CallSid on the client side sometimes changes automatically to a different format, so the recording callback function can't find the Salesforce record to update with the RecordingUrl. Has anyone experienced this before? Any guidance is appreciated.
Below is my JavaScript code. To be more specific, in the startCall function console.log prints the CallSid correctly, but by the time it reaches the saveLog function it holds a different value in a different format, so the saved record ends up with an incorrect value.
<script type="text/javascript">
Twilio.Device.setup("{! token }");
var callerSid; // hold the Twilio generated CallSid unique to this call
var parentId; // hold the parent record being called in order to associate as the parent of task being logged for the call
var newTaskId; // hold the id of newly created task
// function to fire when click 2 dial executes
var startCall = function (payload) {
    sforce.opencti.setSoftphonePanelVisibility({visible: true}); // pop up the CTI softphone
    parentId = payload.recordId; // the record that the phone number being called belongs to
    var cleanednumber = cleanFormatting(payload.number);
    params = {"PhoneNumber": cleanednumber};
    var connection = Twilio.Device.connect(params);
    callerSid = connection.parameters; // track the unique Twilio CallSid
    console.log('clk2dial : ', callerSid.CallSid); // here it logs correctly as CAc57d05994cd69498e0353a5f4b07f2dc
    setTimeout(function(){
        saveLog(); // save the call information in a Task record
    }, 2000);
};
//OpenCTI!!
sforce.opencti.enableClickToDial();
sforce.opencti.onClickToDial({listener : startCall}); // click 2 dial
function cleanFormatting(number) {
    // changes an SFDC formatted US number, e.g. (415) 555-1212, into a Twilio understandable number, 4155551212
    // (a global regex is used so every space, dash, parenthesis and plus is removed, not just the first occurrence)
    return number.replace(/[\s\-()+]/g, '');
}
// save the call information in a Task record
function saveLog() {
    var keyPrefix;
    var taskToSave;
    console.log('callerSid.CallSid : ', callerSid.CallSid); // surprisingly here it logs as TJSce253eb4-c2a0-47f3-957f-8178e95162aa
    if(parentId != null) {
        keyPrefix = parentId.slice(0,3);
    }
    if(keyPrefix != null && keyPrefix == '003') {
        taskToSave = {WhoId: parentId, Type: "Call", CallObject: callerSid.CallSid, entityApiName: "Task", Subject: "Call log"};
    } else {
        taskToSave = {WhatId: parentId, Type: "Call", CallObject: callerSid.CallSid, entityApiName: "Task", Subject: "Call log"};
    }
    sforce.opencti.saveLog({value: taskToSave, callback: saveLogCallBack});
}
// call back function for saving the call information in a Task record
var saveLogCallBack = function(response) {
    if(response.success) {
        newTaskId = response.returnValue.recordId;
        console.log('save success! : ', newTaskId);
    } else {
        console.error('Error saving : ', response.errors);
    }
}
</script>
Answering my own question, as I got through this: I registered a function for Twilio.Device.connect and retrieved the CallSid in the callback function. Along with that, I've updated my click 2 dial function accordingly, as below. However, I was unable to find this approach in the Twilio documentation, so any comments are welcome.
// function to fire when click 2 dial executes
var startCall = function (payload) {
    sforce.opencti.setSoftphonePanelVisibility({visible: true}); // pop up the CTI softphone
    parentId = payload.recordId; // the record that the phone number being called belongs to
    var cleanednumber = cleanFormatting(payload.number);
    params = {"PhoneNumber": cleanednumber};
    Twilio.Device.connect(params);
};
//OpenCTI!!
sforce.opencti.enableClickToDial();
sforce.opencti.onClickToDial({listener : startCall}); // click 2 dial
// registered a function for Twilio Device connect
Twilio.Device.connect(function(response) {
    callerSid = response.parameters; // track the unique Twilio CallSid
    // nothing changed in the save function, so not posting it again
    saveLog(); // save the call information in a Task record
});
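One possible explanation (an assumption on my part, not something confirmed in the Twilio docs) is that connection.parameters is a live object the SDK mutates during the call, so holding a reference to it means later reads see later state. If that's the case, copying the CallSid string out immediately should also avoid the problem; a minimal sketch:

var connection = Twilio.Device.connect(params);
// copy the primitive string now; don't hold on to the mutable parameters object
callerSid = { CallSid: connection.parameters.CallSid };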
Using the Google Geocoder v3, if I try to geocode 20 addresses, I get an OVER_QUERY_LIMIT unless I time them to be ~1 second apart, but then it takes 20 seconds before my markers are all placed.
Is there any other way to do it, other than storing the coordinates in advance?
No, there is not really any other way: if you have many locations and want to display them on a map, the best solution is to:
fetch the latitude+longitude, using the geocoder, when a location is created
store those in your database, alongside the address
and use those stored latitude+longitude when you want to display the map.
This is, of course, considering that you have a lot less creation/modification of locations than you have consultations of locations.
Yes, it means you'll have to do a bit more work when saving the locations -- but it also means:
You'll be able to search by geographical coordinates
i.e. "I want a list of points that are near where I'm now"
Displaying the map will be a lot faster
Even with more than 20 locations on it
Oh, and, also (last but not least): this will work ;-)
You'll be less likely to hit the limit of X geocoder calls in N seconds.
And you'll be less likely to hit the limit of Y geocoder calls per day.
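As a rough sketch of the save-time flow described above (the /locations endpoint and field names are made up for illustration):

var geocoder = new google.maps.Geocoder();

function saveLocation(address) {
    geocoder.geocode({ address: address }, function (results, status) {
        if (status !== google.maps.GeocoderStatus.OK) return;
        var loc = results[0].geometry.location;
        // store the coordinates alongside the address; map displays later
        // read these stored values and never call the geocoder again
        $.post('/locations', { address: address, lat: loc.lat(), lng: loc.lng() });
    });
}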
You actually do not have to wait a full second between requests. I found that if I wait 200 milliseconds between each request, I am able to avoid the OVER_QUERY_LIMIT response and the user experience is passable. With this solution you can load 20 items in 4 seconds.
$(items).each(function(i, item){
    setTimeout(function(){
        geoLocate("my address", function(myLatlng){
            ...
        });
    }, 200 * i);
});
Unfortunately this is a restriction of the Google maps service.
I am currently working on an application using the geocoding feature, and I'm saving each unique address on a per-user basis. I generate the address information (city, street, state, etc.) based on the information returned by Google Maps, and then save the lat/long information in the database as well. This prevents you from having to re-geocode things, and gives you nicely formatted addresses.
Another reason you want to do this is because there is a daily limit on the number of addresses that can be geocoded from a particular IP address. You don't want your application to fail for a person for that reason.
I'm facing the same problem trying to geocode 140 addresses.
My workaround was adding usleep(100000) to each loop before the next geocoding request. If the status of the request is OVER_QUERY_LIMIT, the usleep is increased by 50000 and the request is repeated, and so on.
And of course all received data (lat/long) are stored in an XML file so the requests don't run every time the page loads.
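The same retry-on-OVER_QUERY_LIMIT idea translates to client-side JavaScript; here's a sketch of one possible version, starting at 100 ms and growing the delay by 50 ms on each rejected request (the callback shape is illustrative):

var geocoder = new google.maps.Geocoder();

function geocodeWithBackoff(address, delay, callback) {
    setTimeout(function () {
        geocoder.geocode({ address: address }, function (results, status) {
            if (status === google.maps.GeocoderStatus.OVER_QUERY_LIMIT) {
                // back off: grow the delay and retry the same address
                geocodeWithBackoff(address, delay + 50, callback);
            } else if (status === google.maps.GeocoderStatus.OK) {
                callback(results[0].geometry.location);
            }
        });
    }, delay);
}

geocodeWithBackoff("Rome, Italy", 100, function (latLng) {
    console.log(latLng.lat(), latLng.lng());
});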
EDIT:
Forgot to say that this solution is in pure JS; the only thing you need is a browser that supports promises: https://developer.mozilla.org/it/docs/Web/JavaScript/Reference/Global_Objects/Promise
For those who still need to accomplish such a thing, I've written my own solution that combines promises with timeouts.
Code:
/*
class: Geolocalizer
- Handles location triangulation and calculations.
-- Returns various prototypes to fetch position from strings or coords or dragons or whatever.
*/
var Geolocalizer = function () {
    this.queue = []; // queue handler..
    this.resolved = [];
    this.geolocalizer = new google.maps.Geocoder();
};
Geolocalizer.prototype = {
    /*
    #fn: Localize
    #scope: resolve single or multiple queued requests.
    #params: <array> needles
    #returns: <deferred> object
    */
    Localize: function (needles) {
        var that = this;
        // Enqueue the needles.
        for (var i = 0; i < needles.length; i++) {
            this.queue.push(needles[i]);
        }
        // Return a promise and resolve it after every element has been fetched (either with success or failure), then reset the queue.
        return new Promise(
            function (resolve, reject) {
                that.resolveQueueElements().then(function (resolved) {
                    resolve(resolved);
                    that.queue = [];
                    that.resolved = [];
                });
            }
        );
    },
    /*
    #fn: resolveQueueElements
    #scope: resolve queue elements.
    #returns: <deferred> object (promise)
    */
    resolveQueueElements: function (callback) {
        var that = this;
        return new Promise(
            function (resolve, reject) {
                // Loop the queue and resolve each element.
                // Prevent QUERY_LIMIT by delaying actions by one second.
                (function loopWithDelay(such, queue, i) {
                    console.log("Attempting the resolution of " + queue[i - 1]);
                    setTimeout(function () {
                        such.find(queue[i - 1], function (res) {
                            such.resolved.push(res);
                        });
                        if (--i) {
                            loopWithDelay(such, queue, i);
                        }
                    }, 1000);
                })(that, that.queue, that.queue.length);
                // Check every second whether the queue has been cleared.
                var it = setInterval(function () {
                    if (that.queue.length == that.resolved.length) {
                        resolve(that.resolved);
                        clearInterval(it);
                    }
                }, 1000);
            }
        );
    },
    /*
    #fn: find
    #scope: resolve an address from a string
    #params: <string> s, <fn> callback
    */
    find: function (s, callback) {
        this.geolocalizer.geocode({
            "address": s
        }, function (res, status) {
            if (status == google.maps.GeocoderStatus.OK) {
                var r = {
                    originalString: s,
                    lat: res[0].geometry.location.lat(),
                    lng: res[0].geometry.location.lng()
                };
                callback(r);
            }
            else {
                callback(undefined);
                console.log(status);
                console.log("could not locate " + s);
            }
        });
    }
};
Please note that this is just part of a bigger library I wrote to handle Google Maps stuff, hence the comments may be confusing.
Usage is quite simple; the approach, however, is slightly different: instead of looping and resolving one address at a time, you pass an array of addresses to the class and it handles the search by itself, returning a promise which, when resolved, returns an array containing all the resolved (and unresolved) addresses.
Example:
var myAmazingGeo = new Geolocalizer();
var locations = ["Italy","California","Dragons are thugs...","China","Georgia"];
myAmazingGeo.Localize(locations).then(function(res){
    console.log(res);
});
Console output:
Attempting the resolution of Georgia
Attempting the resolution of China
Attempting the resolution of Dragons are thugs...
Attempting the resolution of California
ZERO_RESULTS
could not locate Dragons are thugs...
Attempting the resolution of Italy
Object returned:
The whole magic happens here:
(function loopWithDelay(such, queue, i){
    console.log("Attempting the resolution of " + queue[i-1]);
    setTimeout(function(){
        such.find(queue[i-1], function(res){
            such.resolved.push(res);
        });
        if (--i) {
            loopWithDelay(such, queue, i);
        }
    }, 750);
})(that, that.queue, that.queue.length);
Basically, it loops over every item with a delay of 750 milliseconds between each of them, hence every 750 milliseconds an address is looked up.
I've done some further testing and found that even at 700 milliseconds I was sometimes getting the QUERY_LIMIT error, while with 750 I haven't had any issue at all.
In any case, feel free to edit the 750 above if you feel safe handling a lower delay.
Hope this helps someone in the near future ;)
I have just tested Google Geocoder and got the same problem as you have.
I noticed I only get the OVER_QUERY_LIMIT status once every 12 requests,
so I wait for 1 second (that's the minimum delay to wait).
That slows down the application, but less than waiting 1 second between every request.
info = getInfos(getLatLng(code)); // In here I call the Google API
record(code, info);
generated++;
if (generated % interval == 0) {
    holdOn(delay); // Every x requests, I sleep for 1 second
}
With the basic holdOn method:
private void holdOn(long delay) {
    try {
        Thread.sleep(delay);
    } catch (InterruptedException ex) {
        // ignore
    }
}
Hope it helps
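For comparison, a rough JavaScript equivalent of the same pause-every-N-requests idea (geocodeOne stands in for whatever actually issues the request):

const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

async function geocodeAll(codes, interval = 12, delay = 1000) {
    const results = [];
    for (let i = 0; i < codes.length; i++) {
        results.push(await geocodeOne(codes[i])); // placeholder for the actual API call
        // after every 'interval' requests, sleep to stay under the rate limit
        if ((i + 1) % interval === 0) await sleep(delay);
    }
    return results;
}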
This worked well for me, after intermittent trial and error over the past couple of days. I am using React instant-search-hooks via Algolia with Next.js and Sanity for a new jobs site for a large company.
Postal code is a facet for filtering/sorting/query matching that is defined in the Algolia index. In another script file, I map out all of these facets (postal code, city, etc.). Now that I have 100 returned files, they can be mapped out by iterating through a mapped asynchronous import, with the lat/lng coords matched to the corresponding zip codes defining a job posting (there are ~2500 postings but only ~100 zip codes to narrow the coordinates down from).
import * as dotenv from "dotenv";
dotenv.config();
import {
googleNetwork,
axiosConfig as googleAxiosConfig
} from "../utils/google-axios";
import JSONData from "../../public/data/postalCode/2022/05/26.json";
import fs from "fs";
import { join } from "path";
import type { GeneratedGeolocData } from "../types/algolia";
import { timezoneHelper } from "../utils/timezone-helper";
import { Unenumerate } from "../types/helpers";
// index used in the generated filename below
let i = 0;
const getGeoCode = (
  record: Unenumerate<typeof JSONData.postalCodes.facetHits>
) =>
  function () {
    return JSONData.postalCodes.facetHits.map(async (data = record, u) => {
      const googleBase = process.env.NEXT_PUBLIC_GOOGLE_MAPS_BASE_PATH ?? "";
      const googleApiKey =
        process.env.NEXT_PUBLIC_TAKEDA_JOBS_GOOGLE_SERVICES ?? "";
      const params: (string | undefined)[][] = [
        ["address", data.value],
        ["key", googleApiKey]
      ];
      const query = params
        .reduce<string[]>((arr, [k, v]) => {
          if (v) arr.push(`${k}=${encodeURIComponent(v)}`);
          return arr;
        }, [])
        .join("&");
      return await googleNetwork("GET")
        .get(`${googleBase}geocode/json?${query}`, googleAxiosConfig)
        .then(dat => {
          const geoloc = dat.data as GeneratedGeolocData;
          const {
            [0]: Year,
            [2]: Month,
            [4]: Day
          } = new Date(Date.now())
            .toISOString()
            .split(/(T)/)[0]
            .split(/([-])/g);
          const localizedTimestamp = timezoneHelper({
            dateField: new Date(Date.now()),
            timezone: "America/Chicago"
          });
          return setTimeout(
            () =>
              fs.appendFileSync(
                join(
                  process.cwd(),
                  `public/data/geoloc/${Year}/${Month}/${Day}-${[i]}.json`
                ),
                JSON.stringify(
                  {
                    generated: localizedTimestamp,
                    _geoloc: {
                      postalCode: data.value,
                      geolocation: geoloc
                    }
                  },
                  null,
                  2
                )
              ),
            1000
          );
        });
    });
  };

getGeoCode(JSONData.postalCodes.facetHits[i])(); // invoke the returned function to run the batch
It took a lot less time than anticipated -- under 4 seconds for 100 unique results to generate
Context on the Unenumerate type -- Unenumerate strips the internal repeating unit within an array:
type Unenumerate<T> = T extends Array<infer U> ? U : T;
I'm using the unirest library to fetch all of the data from an API, which is split up by offset and limit parameters and has no finite number of results.
I'm using a while condition to iterate through the data, and at the point where no results are returned I end the loop by setting an 'incomplete' variable to false.
But for some reason, when I run the following code, nothing happens (no data is added to my database and nothing is output to the console) until I get the 'call_and_retry_last allocation failed' error (I assume this happens when a while loop goes on too long). When I remove the while condition altogether, the code works fine.
Is there a particular reason why this isn't working?
Here's my code:
var limit = 50,
    offset = 0,
    incomplete = true;

while (incomplete) {
    // make api call
    unirest.get("https://www.theapiurl.com")
        .header("Accept", "application/json")
        .send({ "limit": limit, "offset": offset })
        .end(function (result) {
            // parse the json response
            var data = JSON.parse(result.raw_body);
            // if there is data
            if (data.length > 0) {
                // save the api data
                // + increase the offset value for next set of data
                offset += limit;
            } else {
                // if there is no data left, end loop
                incomplete = false;
                console.log("finished!");
            }
        });
}
You can use a recursive function, as in:
function getServerData(offset){
    // Your api service with callback. If there is data, call it again with the new offset.
}
getServerData(1);
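Fleshed out with the unirest call from the question, the recursive version might look like this (the save step is left as a comment):

var limit = 50;

function getServerData(offset) {
    unirest.get("https://www.theapiurl.com")
        .header("Accept", "application/json")
        .send({ "limit": limit, "offset": offset })
        .end(function (result) {
            var data = JSON.parse(result.raw_body);
            if (data.length > 0) {
                // save the api data here, then fetch the next page
                getServerData(offset + limit);
            } else {
                console.log("finished!");
            }
        });
}

getServerData(0);

Because each call is triggered from the previous response's callback, there is no busy loop, and the process naturally stops when an empty page comes back.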
Hi, I am making a site in Ruby on Rails. I am having a problem showing some content on the client side. What I want is something like news, where the news changes every 10 seconds. What I have done is make an ajax call which fetches the news from my server; the server returns an array in a JSON response. Now that I have the news array on the client side, I want to show the items one by one at 10 second intervals.
I have tried this code, but it shows nothing except the last news item.
function get_news(){
    $.ajax({
        url : "my.json",
        success: function(data){
            // data is array of json like [{"html" : "dfsf"},{"html":"ddd"}]
            news_change(data);
        }
    });
}
get_news();

function news_change(feed){
    $.each(feed, function(index, f){
        var feed_html = f.html;
        $('#news_div').fadeOut('slow', function(){
            $('#news_div').html(feed_html);
            $('#news_div').fadeIn('slow');
        });
        sleep(10000);
    });
}

function sleep(milliseconds) {
    var start = new Date().getTime();
    for (var i = 0; i < 1e7; i++) {
        if ((new Date().getTime() - start) > milliseconds){
            break;
        }
    }
}
When I execute this code it only shows the last news item, and it also hangs my browser. Please suggest why this is happening.
Use setTimeout or setInterval, which execute code asynchronously after a certain number of milliseconds. Looping is synchronous and locks the browser while it executes.
// this will execute get_news every 10,000 ms
var newsInterval = setInterval(get_news, 10000);
// if you ever want to stop the interval, use clearInterval
clearInterval(newsInterval);
Note that get_news performs an ajax call, which could take some time, meaning that your news will not update exactly every 10 seconds.
EDIT: to iterate through the news array every 10 seconds, you'd call news_change from a setInterval callback:
var newsInterval;
function get_news(){
    $.ajax({
        url : "my.json",
        success: function(data) {
            newsInterval = setInterval(function () {
                news_change(data);
            }, 10000);
        }
    });
}
get_news();
get_news();
// if you ever want to stop the interval, use clearInterval
clearInterval(newsInterval);
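Note that news_change as posted still walks the whole array in one synchronous $.each pass, which is why only the last item sticks. A sketch of a variant that advances one item per tick instead (same #news_div markup assumed):

function news_change(feed) {
    var index = 0;
    setInterval(function () {
        var feed_html = feed[index].html;
        $('#news_div').fadeOut('slow', function () {
            $('#news_div').html(feed_html).fadeIn('slow');
        });
        index = (index + 1) % feed.length; // wrap around to the first item
    }, 10000);
}

With this variant, get_news would call news_change(data) once from its success callback, without wrapping it in another interval.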