I have an API that only allows fetching 1000 rows per request.
So, for instance, if I want to retrieve all the data from the API, the idea is to loop, fetching a page each time and checking the length of the response: if responseData.length !== 0, continue fetching; stop when responseData.length === 0. I also increase firstRow each time a new loop starts, until the end is reached (responseData.length === 0).
const fetchDataByRowCount = async (url, token, rowCount = 2, firstRow = 0) => {
  // firstRow is the value where the next fetch starts (E.g: 0-999, 1000-1999, etc.).
  // rowCount is the value for total rows fetched (E.g: 1000 rows for each fetching time).
  const data = await axios({
    method: "get",
    url: `${url}?rowCount=${rowCount}&firstRow=${firstRow}`,
    headers: {
      client_id: "",
      Authorization: `Bearer ${token}`,
    },
  });
  return data.data;
};
export const paginatedFetch = async (url, type, rowCount = 2, firstRow = 0) => {
  let newResponse;
  let total = [];
  let token = await getToken(type); // stored to reuse token within an hour
  do {
    if (!token) {
      const newToken = await getToken(type);
      newResponse = await fetchDataByRowCount(url, newToken);
    } else {
      newResponse = await fetchDataByRowCount(
        url,
        token,
        (rowCount = 2),
        (firstRow = 0)
      );
    }
    // console.log(total, "total");
    total = [...total, ...newResponse];
    // newResponse = [];
    let newFirstRow = firstRow + 1000;
    newResponse = await fetchDataByRowCount(
      url,
      token,
      (rowCount = 2),
      newFirstRow
    );
    total = [...total, ...newResponse];
  } while (newResponse.length !== 0);
  return total;
};
But the problem is that my function doesn't exit the do...while loop; newResponse.length is always !== 0.
Also, the function only runs once.
Could you guys help me check this, please?
From the code you posted, there's still one thing I can't figure out, and that's rowCount, so I left it as it is in the following "remastered" code:
export const paginatedFetch = async (url, type, rowCount = 2, firstRow = 0) => {
  let newResponse;
  let total = [];
  let token;
  let numberOfRows = firstRow;
  do {
    if (!token) token = await getToken(type);
    newResponse = await fetchDataByRowCount(
      url,
      token,
      (rowCount = 2),
      numberOfRows
    );
    total = [...total, ...newResponse];
    numberOfRows += 1000;
  } while (newResponse.length !== 0);
  return total;
};
I got rid of a few things that were redundant, and made the code a tiny bit more efficient with variable assignments etc.
You also mention this:
newResponse.length is always !== 0.
Be careful with that check: newResponse starts out undefined. It only works because a do...while always executes its body at least once before evaluating the condition, so newResponse has been assigned by the time the check runs; the loop can never run zero times. That also means your "Also, the function only runs once", if you're speaking about the paginatedFetch function, can't be the do...while failing to start.
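A minimal demonstration of that at-least-once behavior, independent of any API:

let i = 5;
do {
  console.log("body ran, i =", i); // runs once even though i < 5 is already false
} while (i < 5);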
Now if I were to re-write it, I would do it like so:
export const paginatedFetch = async (url, type, rowCount = 2, firstRow = 0) => {
  let total = [];
  let token;
  let numberOfRows = firstRow;
  while (true) {
    if (!token) token = await getToken(type);
    let res = await fetchDataByRowCount(
      url,
      token,
      (rowCount = 2),
      numberOfRows
    );
    total = [...total, ...res];
    numberOfRows += 1000;
    if (res.length <= 0) break;
  }
  return total;
};
Again, be careful with while (true): you have to be absolutely sure of what the API returns and that res is indeed an array.
The better solution would be for the API (if you are the developer) to provide an endpoint that returns the total number of rows. That way, you would know for certain how many rows there are and could write your code around that.
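For illustration, a count-driven version could look like the sketch below. The /count path and totalRows field are hypothetical; adjust them to whatever the real API exposes.

const fetchAllRows = async (url, type, rowCount = 1000) => {
  const token = await getToken(type);
  // hypothetical count endpoint returning { totalRows }
  const countRes = await axios({
    method: "get",
    url: `${url}/count`,
    headers: { Authorization: `Bearer ${token}` },
  });
  const totalRows = countRes.data.totalRows;
  let total = [];
  // fetch full pages until the known total is covered
  for (let firstRow = 0; firstRow < totalRows; firstRow += rowCount) {
    const res = await fetchDataByRowCount(url, token, rowCount, firstRow);
    total = [...total, ...res];
  }
  return total;
};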
Related
I used the code below to get the data from the API.
I used a for loop within a for loop; it takes a long time and the program stops when the execution time limit is exceeded.
function devicedetails(){
  var apikey='YWQ0OWFhYjgtNTY2asiHSNSajiasn'
  var todaydate = Utilities.formatDate(new Date(Date.now() - 1000*60*60*24), "UTC", "yyyy-MM-dd")
  var thirtydate = Utilities.formatDate(new Date(Date.now() - 1000*60*60*24*30), "UTC", "yyyy-MM-dd")
  var cisss= SpreadsheetApp.getActiveSpreadsheet();
  var workspacesheet = cisss.getSheetByName("Device");
  var lastRows = workspacesheet.getLastRow()+1;
  for(var im = 2; im < lastRows; im++)
  {
    var workspacedata = workspacesheet.getRange('B'+im).getValue();
    var encodedata = encodeURIComponent(workspacedata);
    var cisurl = "https://testapi.com/v1/workspaceDurationMetrics?workspaceId="+encodedata+"&aggregation=daily&measurement=timeUsed&from="+thirtydate+"T00%3A00%3A00.00Z&to="+todaydate+"T00%3A00%3A00.00Z";
    var cisss= SpreadsheetApp.getActiveSpreadsheet()
    var ciswsLocation = cisss.getSheetByName("HourlyUsed")
    var lastRow = ciswsLocation.getLastRow();
    var headers = {
      "Content-type": "application/json",
      "Authorization": `Bearer ${apikey} `
    };
    var options = {
      "method" : "get",
      "headers" : headers
    };
    var response = UrlFetchApp.fetch(cisurl,options);
    var cisjson=response.getContentText();
    var cisdata=JSON.parse(cisjson);
    for(var i = 0; i < cisdata['items'].length; i++)
    {
      ciswsLocation.getRange(lastRow+1+i,1).setValue([cisdata["workspaceId"]]);
      ciswsLocation.getRange(lastRow+1+i,2).setValue(Utilities.formatDate(new Date([cisdata["items"][i]['start']]), "UTC", "yyyy-MM-dd"));
      ciswsLocation.getRange(lastRow+1+i,3).setValue([cisdata["items"][i]['duration']]);
    }
  }
}
Please help me reduce the execution time.
Exactly what liqidkat said.
With that, it may look something like this:
function devicedetails() {
  /** Variables **/
  const apikey = 'YWQ0OWFhYjgtNTY2asiHSNSajiasn'
  const todaydate = Utilities.formatDate(new Date(Date.now() - 1000*60*60*24), "UTC", "yyyy-MM-dd")
  const thirtydate = Utilities.formatDate(new Date(Date.now() - 1000*60*60*24*30), "UTC", "yyyy-MM-dd")

  /** Sheet Variables **/
  const cisss = SpreadsheetApp.getActiveSpreadsheet()
  const workspacesheet = cisss.getSheetByName("Device")
  const workspaceData = workspacesheet.getRange(2, 2, workspacesheet.getLastRow()-1).getValues().flat()
  const ciswsLocation = cisss.getSheetByName("HourlyUsed")
  const lastRow = ciswsLocation.getLastRow()

  /** Request Handling **/
  const allRequests = workspaceData.map(i => {
    const encodeData = encodeURIComponent(i)
    return {
      "url": `https://testapi.com/v1/workspaceDurationMetrics?workspaceId=${encodeData}&aggregation=daily&measurement=timeUsed&from=${thirtydate}T00%3A00%3A00.00Z&to=${todaydate}T00%3A00%3A00.00Z`,
      "method": "get",
      "headers": {
        "Content-type": "application/json",
        "Authorization": `Bearer ${apikey}`
      }
    }
  })

  /** Response Handling **/
  const allResponses = UrlFetchApp.fetchAll(allRequests)
  // flat() collapses the per-response arrays into one Object[][], one row per item
  const data = allResponses.map(response => {
    const cisjson = response.getContentText()
    const cisData = JSON.parse(cisjson)
    return cisData[`items`].map(i => [
      cisData["workspaceId"],
      Utilities.formatDate(new Date(i['start']), "UTC", "yyyy-MM-dd"),
      i['duration']
    ])
  }).flat()

  /** Set data **/
  ciswsLocation.getRange(lastRow+1, 1, data.length, data[0].length).setValues(data)
}
See Also:
Array.map()
UrlFetchApp.fetchAll()
Range.setValues()
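One caveat worth adding: if any single request in the batch fails at the HTTP level, UrlFetchApp.fetchAll() can throw and abort the whole run. Assuming you would rather keep the successful responses, each request object accepts muteHttpExceptions, and you can filter by response code before parsing. A sketch:

// Same request objects as above, but tolerant of individual failures
const allRequests = workspaceData.map(i => ({
  "url": `...`, // same URL template as above
  "method": "get",
  "headers": { "Content-type": "application/json", "Authorization": `Bearer ${apikey}` },
  "muteHttpExceptions": true
}))
const okResponses = UrlFetchApp.fetchAll(allRequests)
  .filter(response => response.getResponseCode() === 200)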
I will provide my particular approach to this problem, as I think it may be of interest to the community.
Since the OP has not shown the type of response the API returns (and mentions that it is for private use), I will use a public API for the example, the Google Books API in this case. I will also assume that all the calls are made to the same API, so the responses have an identical shape.
I think the problem can be divided into 4 major steps.
1. Generate the URLs of the calls (depends on the API).
2. Get the data via UrlFetchApp.fetchAll(Object).
3. Normalize the data (this is the most critical step, as it depends on the API response). The main point is to obtain an array of arrays (Object[][]) as required for the next step.
4. Write the data to the sheet using setValues(Object[][]).
Full example here.
Generate URLs
const generateUrl = (authors) => authors.map(author => `https://books.googleapis.com/books/v1/volumes?q=${author}&country=US`)
Get The Data
const fetchAndNormalizeData = (urlList) => {
  const resAll = UrlFetchApp.fetchAll(urlList).map(res => res.getContentText())
  return resAll.map(normalizeResponse).flat()
}
Normalize The Data
const normalizeResponse = (res) => {
  /* This depends on the REST API response */
  const { items } = JSON.parse(res)
  return items.map((book) => {
    const { selfLink, volumeInfo: { title, authors, publishedDate } } = book
    const parsedAuthors = authors ? authors.join('\n') : ""
    return [title, parsedAuthors, selfLink, publishedDate]
  })
}
Write to Sheet
const writeToSheet = (data) => {
  sS
    .getRange(sS.getLastRow() + 1, 1, data.length, data[0].length)
    .setValues(data)
  console.log("DATA SAVED")
}
Main function
const SS_ID = "<SS_ID>"
const sS = SpreadsheetApp.openById(SS_ID).getSheetByName('BookData')

const main = () => {
  const urlList = generateUrl(["Twain", "Joyce", "Pasternak"])
  const data = fetchAndNormalizeData(urlList)
  writeToSheet(data)
}
In the case of the OP, you just have to modify normalizeResponse (the callback for the map function) and generateUrl to adapt them to your needs.
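For instance, a version adapted to the fields the question's script reads (a top-level workspaceId plus items entries with start and duration) might look like the sketch below. Those field names come from the question's code, not from the API's documentation, so treat them as assumptions.

const normalizeResponse = (res) => {
  // field names assumed from the question's script
  const { workspaceId, items } = JSON.parse(res)
  return items.map(({ start, duration }) => [
    workspaceId,
    Utilities.formatDate(new Date(start), "UTC", "yyyy-MM-dd"),
    duration
  ])
}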
Documentation:
Array.prototype.map()
Array.prototype.flat()
Description
I took the liberty of editing your script to replace all getValue/setValue calls with getValues/setValues, and I moved every variable that only needs to be set once outside the loop. First I get all the workspacedata, then inside the loop I index into that array for each row. Next, since your results are contiguous in rows and columns, I collect all the results and make one call to setValues to place them in the sheet.
Although I am not able to test it, since the input data is not available, I believe it will work and will run much faster.
Although Google has improved the performance of getValue/setValue by caching requests, I try to organize my spreadsheets so that I always use getValues/setValues. The same goes for other getters and setters.
Script
function devicedetails(){
  var apikey='YWQ0OWFhYjgtNTY2asiHSNSajiasn'
  var todaydate = Utilities.formatDate(new Date(Date.now() - 1000*60*60*24), "UTC", "yyyy-MM-dd")
  var thirtydate = Utilities.formatDate(new Date(Date.now() - 1000*60*60*24*30), "UTC", "yyyy-MM-dd")
  var cisss = SpreadsheetApp.getActiveSpreadsheet();
  var workspacesheet = cisss.getSheetByName("Device");
  var lastRows = workspacesheet.getLastRow()-1; // number of data rows below the header
  var workspacedata = workspacesheet.getRange(2,2,lastRows,1).getValues();
  var ciswsLocation = cisss.getSheetByName("HourlyUsed")
  for(var im = 0; im < lastRows; im++) {
    var encodedata = encodeURIComponent(workspacedata[im][0]);
    var cisurl = "https://testapi.com/v1/workspaceDurationMetrics?workspaceId="+encodedata+"&aggregation=daily&measurement=timeUsed&from="+thirtydate+"T00%3A00%3A00.00Z&to="+todaydate+"T00%3A00%3A00.00Z";
    var lastRow = ciswsLocation.getLastRow();
    var headers = {
      "Content-type": "application/json",
      "Authorization": `Bearer ${apikey}`
    };
    var options = {
      "method" : "get",
      "headers" : headers
    };
    var response = UrlFetchApp.fetch(cisurl,options);
    var cisjson = response.getContentText();
    var cisdata = JSON.parse(cisjson);
    var results = [];
    for(var i = 0; i < cisdata['items'].length; i++) {
      let row = []
      row[0] = cisdata["workspaceId"];
      row[1] = Utilities.formatDate(new Date(cisdata["items"][i]['start']), "UTC", "yyyy-MM-dd");
      row[2] = cisdata["items"][i]['duration'];
      results.push(row);
    }
    ciswsLocation.getRange(lastRow+1,1,results.length,results[0].length).setValues(results);
  }
}
I am using the Math.max function with the spread operator to process an array, but I am still getting NaN. Any help with this would be greatly appreciated.
import finnhub from 'https://cdn.skypack.dev/finnhub';

const api_key = finnhub.ApiClient.instance.authentications['api_key'];
api_key.apiKey = "c5cam0iad3ib55bb0ajg"
const finnhubClient = new finnhub.DefaultApi()
let stockTicker = prompt("Please enter the symbol of the stock to chart:");

// Finance API function call below
finnhubClient.stockCandles(stockTicker, "D", ((Math.trunc(Date.now() / 1000)) - 8640000), Date.now(), (error, data, response) => {
  let dataSetDayHigh = []
  let dataSetDayRange = 1
  for (dataSetDayRange = 1; dataSetDayRange < 41; dataSetDayRange = dataSetDayRange + 1){
    dataSetDayHigh[dataSetDayRange] = Math.trunc(data.h[dataSetDayRange])
  }
  let yAxisRangeUpper = Math.max.apply(null, dataSetDayHigh)
  console.log("Data Set Day High Values = " + yAxisRangeUpper)
})
I found this script somewhere... it might have been in npm's source code, actually. I'm not sure; all I know is that I did not write it myself. Looking at it, I can't help but wonder whether it, or a similar refactor of the following code, could power a quick web crawl of a static site and return a list of URLs leading to the pages with the most hits on the search term. I don't need anything fancy like fuzzy search, and I'm not asking anyone to write the code for me; I'd just like a second (or third) pair of eyes to look at this code and decide whether there's any potential in it for implementing simple full-text search.
const fs = require("fs");
const path = require("path");
const npm = require("./npm.js");
const color = require("ansicolors");
const output = require("./utils/output.js");
const usageUtil = require("./utils/usage.js");
const { promisify } = require("util");
const glob = promisify(require("glob"));
const readFile = promisify(fs.readFile);
const didYouMean = require("./utils/did-you-mean.js");
const { cmdList } = require("./utils/cmd-list.js");
const usage = usageUtil("help-search", "npm help-search <text>");
const completion = require("./utils/completion/none.js");
const npmUsage = require("./utils/npm-usage.js");
const cmd = (args, cb) =>
  helpSearch(args)
    .then(() => cb())
    .catch(cb);
const helpSearch = async (args) => {
  if (!args.length) throw usage;
  const docPath = path.resolve(__dirname, "..", "docs/content");
  const files = await glob(`${docPath}/*/*.md`);
  const data = await readFiles(files);
  const results = await searchFiles(args, data, files);
  // if only one result, then just show that help section.
  if (results.length === 1) {
    return npm.commands.help([path.basename(results[0].file, ".md")], (er) => {
      if (er) throw er;
    });
  }
  const formatted = formatResults(args, results);
  if (!formatted.trim()) npmUsage(false);
  else {
    output(formatted);
    output(didYouMean(args[0], cmdList));
  }
};
const readFiles = async (files) => {
  const res = {};
  await Promise.all(
    files.map(async (file) => {
      res[file] = (await readFile(file, "utf8"))
        .replace(/^---\n(.*\n)*?---\n/, "")
        .trim();
    })
  );
  return res;
};
const searchFiles = async (args, data, files) => {
  const results = [];
  for (const [file, content] of Object.entries(data)) {
    const lowerCase = content.toLowerCase();
    // skip if no matches at all
    if (!args.some((a) => lowerCase.includes(a.toLowerCase()))) continue;
    const lines = content.split(/\n+/);
    // if a line has a search term, then skip it and the next line.
    // if the next line has a search term, then skip all 3
    // otherwise, set the line to null. then remove the nulls.
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      const nextLine = lines[i + 1];
      let match = false;
      if (nextLine) {
        match = args.some((a) =>
          nextLine.toLowerCase().includes(a.toLowerCase())
        );
        if (match) {
          // skip over the next line, and the line after it.
          i += 2;
          continue;
        }
      }
      match = args.some((a) => line.toLowerCase().includes(a.toLowerCase()));
      if (match) {
        // skip over the next line
        i++;
        continue;
      }
      lines[i] = null;
    }
    // now squish any string of nulls into a single null
    const pruned = lines.reduce((l, r) => {
      if (!(r === null && l[l.length - 1] === null)) l.push(r);
      return l;
    }, []);
    if (pruned[pruned.length - 1] === null) pruned.pop();
    if (pruned[0] === null) pruned.shift();
    // now count how many args were found
    const found = {};
    let totalHits = 0;
    for (const line of pruned) {
      for (const arg of args) {
        const hit =
          (line || "").toLowerCase().split(arg.toLowerCase()).length - 1;
        if (hit > 0) {
          found[arg] = (found[arg] || 0) + hit;
          totalHits += hit;
        }
      }
    }
    const cmd = "npm help " + path.basename(file, ".md").replace(/^npm-/, "");
    results.push({
      file,
      cmd,
      lines: pruned,
      found: Object.keys(found),
      hits: found,
      totalHits,
    });
  }
  // sort results by number of results found, then by number of hits
  // then by number of matching lines
  // coverage is ignored here because the contents of results are
  // nondeterministic due to either glob or readFiles or Object.entries
  return results
    .sort(
      /* istanbul ignore next */ (a, b) =>
        a.found.length > b.found.length
          ? -1
          : a.found.length < b.found.length
            ? 1
            : a.totalHits > b.totalHits
              ? -1
              : a.totalHits < b.totalHits
                ? 1
                : a.lines.length > b.lines.length
                  ? -1
                  : a.lines.length < b.lines.length
                    ? 1
                    : 0
    )
    .slice(0, 10);
};
const formatResults = (args, results) => {
  const cols = Math.min(process.stdout.columns || Infinity, 80) + 1;
  const out = results
    .map((res) => {
      const out = [res.cmd];
      const r = Object.keys(res.hits)
        .map((k) => `${k}:${res.hits[k]}`)
        .sort((a, b) => (a > b ? 1 : -1))
        .join(" ");
      out.push(
        " ".repeat(Math.max(1, cols - out.join(" ").length - r.length - 1))
      );
      out.push(r);
      if (!npm.flatOptions.long) return out.join("");
      out.unshift("\n\n");
      out.push("\n");
      out.push("-".repeat(cols - 1) + "\n");
      res.lines.forEach((line, i) => {
        if (line === null || i > 3) return;
        if (!npm.color) {
          out.push(line + "\n");
          return;
        }
        const hilitLine = [];
        for (const arg of args) {
          const finder = line.toLowerCase().split(arg.toLowerCase());
          let p = 0;
          for (const f of finder) {
            hilitLine.push(line.substr(p, f.length));
            const word = line.substr(p + f.length, arg.length);
            const hilit = color.bgBlack(color.red(word));
            hilitLine.push(hilit);
            p += f.length + arg.length;
          }
        }
        out.push(hilitLine.join("") + "\n");
      });
      return out.join("");
    })
    .join("\n");
  const finalOut =
    results.length && !npm.flatOptions.long
      ? "Top hits for " +
        args.map(JSON.stringify).join(" ") +
        "\n" +
        "—".repeat(cols - 1) +
        "\n" +
        out +
        "\n" +
        "—".repeat(cols - 1) +
        "\n" +
        "(run with -l or --long to see more context)"
      : out;
  return finalOut.trim();
};
module.exports = Object.assign(cmd, { usage, completion });
Depending on how your site is structured and generated, I don't see why a client-side text search wouldn't work. I wouldn't recommend crawling the site on the client side, though, so it would probably be better to generate a data file at build time and then base the search on that.
If your static site is generated with a static site generator, you might be able to get the static site generator to create a JSON file with all the content. Otherwise, if it's just static assets, you could probably create a script to read your content and create the data file that way.
There are also plenty of libraries available that do searching of a JSON object, such as fuse.js.
The main concern with a client-side search is the amount of text to search. If you have lots of content, the client would have to load everything into memory, which may be a concern, though you'll have to test things out for your particular use case.
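As a rough sketch of that approach with fuse.js, assuming a search-index.json generated at build time containing { url, title, content } entries (those field names are made up for illustration):

import Fuse from "fuse.js";

// Load the prebuilt index and search it entirely client-side
async function searchSite(query) {
  const pages = await (await fetch("/search-index.json")).json();
  const fuse = new Fuse(pages, { keys: ["title", "content"] });
  // return the URLs of the ten best-matching pages
  return fuse.search(query).slice(0, 10).map(result => result.item.url);
}

searchSite("install").then(urls => console.log(urls));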
Using the code below, I am able to retrieve user names from jsonplaceholder.com.
I am able to see ALL the user names using console.log, but I cannot output ALL of these items onscreen!
The output is only ONE of the array items.
Using userName[0] or userName[1] outputs the same item?!
Using userName[0].textContent = info[1]; outputs the first letter of a user name.
Any help appreciated.
Thank you!
(function() {
  // set api addresses
  const usersJSON = "https://jsonplaceholder.typicode.com/users";
  const commentsJSON = "https://jsonplaceholder.typicode.com/comments";

  // user function //
  // IIFE - get, parse, catch
  (function getData() {
    // get data
    fetch(usersJSON)
      .then(function(response) {
        return response.json();
      })
      // parse to json
      .then(function(data) {
        // user name
        for (let i = 0; i < data.length; i++) {
          // generate random number 1 - 10
          var num = Math.floor(Math.random() * 10);
          displayName(data[num].name);
        }
      })

    // display user name
    function displayName(info) {
      const userName = document.getElementsByClassName("userName");
      for (let i = 0; i < userName.length; i++) {
        userName[0].textContent = info;
        userName[1].textContent = info;
      }
    }
  })();
})();
The first thing you might want to do is use an ID and grab the element using getElementById; then you can remove the loop to make the function simpler (unless you have multiple elements you want to append the names to).
Your issue is that you are using =, which overwrites the previously set text, which is why you are only seeing one item: you add the first item from the array, then on the next loop iteration you replace it with the next item in the array.
Here are two different solutions to fix your problem:
Use += to append data instead of overwriting it:
function displayName(info) {
  const userName = document.getElementById("users");
  userName.innerHTML += '<p>' + info + '</p>';
}
Use appendChild to append data instead of overwriting it:
function displayName(info) {
  const userName = document.getElementById("users")
  const p = document.createElement('p')
  p.textContent = info
  userName.appendChild(p)
}
(function() {
  // set api addresses
  const usersJSON = "https://jsonplaceholder.typicode.com/users";
  const commentsJSON = "https://jsonplaceholder.typicode.com/comments";

  // user function //
  // IIFE - get, parse, catch
  (function getData() {
    // get data
    fetch(usersJSON)
      .then(function(response) {
        return response.json();
      })
      // parse to json
      .then(function(data) {
        // user name
        for (let i = 0; i < data.length; i++) {
          // generate random number 1 - 10
          var num = Math.floor(Math.random() * 10);
          displayName(data[num].name);
        }
      })

    // display user name
    function displayName(info) {
      const userName = document.getElementById("users");
      userName.innerHTML += '<p>' + info + '</p>';
    }
  })();
})();
<div id="users"></div>
I am just starting with NetSuite and trying to pull all items with their details using a RESTlet. With some research, I am able to pull all the items, but the way I am doing it now is not straightforward: I first pull all the item IDs using nlapiSearchRecord, then loop through each ID to get the details of each item using nlapiLoadRecord and add them to an array. This way, it takes too much time. Is there another way to pull all items with their details? Below is my code.
function getAllIDs() {
  return nlapiSearchRecord('item', null, null, null);
}

function getRecord() {
  var all_IDs = getAllIDs();
  var len = all_IDs.length;
  var result = new Array();
  for (var i = 0; i < all_IDs.length; i++) {
    if (all_IDs[i].getRecordType() === "inventoryitem")
      result[i] = nlapiLoadRecord(all_IDs[i].getRecordType(), all_IDs[i].id)
  }
  return result;
}
You can use what @Krypton suggested, but you will always get 1000 results at most.
Try the following if you have a requirement to get more than 1000 (using SuiteScript 2.0):
// Assumes the N/search module is loaded as `search`
var columns = [];
var filters = [['isinactive', 'is', 'F']];
columns.push(search.createColumn({ name: "itemid" }));
columns.push(search.createColumn({ name: "displayname" }));
columns.push(search.createColumn({ name: "salesdescription" }));
columns.push(search.createColumn({ name: "baseprice" }));

var inventoryitemSearch = search.create({
  type: search.Type.INVENTORY_ITEM, // change the type as per your requirement
  filters: filters,
  columns: columns
});

var arrResults = [];
var count = 1000;
var startIndex = 0;
var endIndex = 1000;
var resultSet = inventoryitemSearch.run();

while (count == 1000) {
  var results = resultSet.getRange(startIndex, endIndex);
  arrResults = arrResults.concat(results);
  startIndex = endIndex;
  endIndex += 1000;
  count = results.length;
}

log.debug({ title: 'arrResults', details: arrResults });
You can include the details you want in the search itself. For example, you can add an nlobjSearchFilter so that the search only returns inventory items, and an nlobjSearchColumn for each field you want to see in the details. That way all the details you want are returned with the search, and you can loop through the results and use them without loading every record individually, which is where most of the performance hit happens.
An example:
var inventoryitemSearch = nlapiSearchRecord("inventoryitem", null,
  [
    ["type", "anyof", "InvtPart"]
  ],
  [
    new nlobjSearchColumn("itemid", null, null).setSort(false),
    new nlobjSearchColumn("displayname", null, null),
    new nlobjSearchColumn("salesdescription", null, null),
    new nlobjSearchColumn("baseprice", null, null)
  ]
);
Then you can loop through the results to get details:
var name, displayName, description, price;
for (var i = 0; inventoryitemSearch != null && i < inventoryitemSearch.length; i++) {
  var searchresult = inventoryitemSearch[i];
  name = searchresult.getValue('itemid');
  displayName = searchresult.getValue('displayname');
  description = searchresult.getValue('salesdescription');
  price = searchresult.getValue('baseprice');
}
There is a lot to learn about scripted searches in NetSuite, so I'd recommend starting here (NetSuite login required) and follow the links and keep reading / experimenting until your eyes glaze over.
I just like to use a generic function that accepts a search object...
const getAllResults = searchObj => {
  try {
    const Resultset = searchObj.run()
    const maxResults = searchObj.runPaged().count
    let ResultSubSet = null
    let index = 0
    const maxSearchReturn = 1000
    let AllSearchResults = []
    do {
      let start = index
      let end = index + maxSearchReturn
      if (maxResults && maxResults <= end) {
        end = maxResults
      }
      ResultSubSet = Resultset.getRange(start, end)
      if (ResultSubSet.length === 0) {
        break
      }
      // we could introduce a record processor here to lighten the load
      AllSearchResults = AllSearchResults.concat(ResultSubSet)
      index = index + ResultSubSet.length
      if (maxResults && maxResults == index) {
        break
      }
    } while (ResultSubSet.length >= maxSearchReturn)
    return AllSearchResults
  } catch (e) {
    log.error(`getAllResults()`, `error : ${e}`)
  }
}
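Usage then looks something like this, pairing it with the inventory item search from the earlier answer (assumes the N/search and N/log modules are in scope):

const itemSearch = search.create({
  type: search.Type.INVENTORY_ITEM,
  filters: [['isinactive', 'is', 'F']],
  columns: ['itemid', 'displayname', 'salesdescription', 'baseprice']
});
const allResults = getAllResults(itemSearch);
log.debug('getAllResults demo', `fetched ${allResults.length} results`);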