The script must log postback information about call details from zvonok.com to a Google spreadsheet. I have written a function which only appends a row to the spreadsheet; no cell is ever updated or modified anywhere in the code. During a few manual test calls the rows were appended correctly, but when my client began his usual call campaign, with calls and postbacks arriving in quick succession one after another, the values in the last row began changing several times, and in some cases strange values were left behind.
This is the first time I have seen behavior like this, so I made a short video recording:
https://youtu.be/0_H_mVAbp4g
Here is one column with strange values:
2103052006092385
2,10305E+15
210305412464544
I have found 9 such cases among 248 rows.
The client showed me an Excel export from his user cabinet: 5649 calls were made in total, so the Google spreadsheet should contain 5649 rows instead of 248.
function getJsonFromUrl(url) {
  var query = url;
  var result = {};
  if (query == undefined) {
    return result;
  }
  query.split("&").forEach(function(part) {
    var item = part.split("=");
    result[item[0]] = decodeURIComponent(item[1]);
  });
  return result;
}
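(For example, getJsonFromUrl('a=1&b=two%20words') returns { a: '1', b: 'two words' }.)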
function doGet(e) {
  const ctCompl = 'ct_completed';
  var doc = SpreadsheetApp.openById(SHEET_KEY);
  var sheet = doc.getSheetByName(SHEET_NAME);
  var row = [];
  if (typeof e !== 'undefined') { // was: typeof e !== undefined, which is always true
    var mArr = getJsonFromUrl(e.queryString);
    for (var i in mArr) {
      if (i == ctCompl) {
        row.push(convTimeLong(mArr[i]));
      } else {
        row.push(mArr[i]);
      }
    }
    sheet.appendRow(row);
  } else {
    sheet.appendRow(['e undefined!']);
  }
  SpreadsheetApp.flush();
  return handleResponse(e);
}
function convTimeLong(dateTime) {
  try {
    // the postback sends the timestamp with '+' in place of the date/time separator,
    // so turn it into an ISO-like string before parsing
    // (the original formatted new Date() and ignored the parsed argument)
    let d = new Date(dateTime.replace('+', 'T'));
    return Utilities.formatDate(d, "GMT+2", "dd.MM.yyyy HH:mm");
  } catch (e) {
    return dateTime;
  }
}
The executions dashboard shows status "completed" everywhere; the longest execution time is 1.688 s.
The client has set a 5-second delay between calls. Right now I don't know whether the percentage of lost postbacks decreased after the delay was set, but it is still very high.
In general, using Google Sheets as a database is a bad idea. It's not designed for this, so it can fail really badly. Using a proper database will make everything much, much easier. If you are using the spreadsheet only to post-process the data afterwards, I'd advise using a function that imports data, like IMPORTXML (see the reference).
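(For example, a formula such as =IMPORTXML("https://example.com/feed.xml", "//item") in a cell pulls the data in on Google's side; the URL and XPath here are only placeholders.)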
That being said, if you insist on using Sheets, you could try using locks:
function appendRow(sheet, row) {
  const lock = LockService.getScriptLock()
  while (!lock.tryLock(100000)) /* Spin until the lock is acquired */;
  try {
    sheet.appendRow(row)
    SpreadsheetApp.flush()
  } finally {
    lock.releaseLock()
  }
}
To use it, you only need to pass the sheet and the values to add: change sheet.appendRow(row) to appendRow(sheet, row).
It will make sure that entries don't get overwritten. Note that this will slow down the code a lot, and the script can time out if there are a lot of requests.
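For illustration, here is roughly how the doGet from the question would look once it delegates to the locked helper (a sketch only, reusing the SHEET_KEY, SHEET_NAME, getJsonFromUrl, convTimeLong and handleResponse already defined in the original script):

function doGet(e) {
  var doc = SpreadsheetApp.openById(SHEET_KEY);
  var sheet = doc.getSheetByName(SHEET_NAME);
  var row = [];
  if (typeof e !== 'undefined') {
    var mArr = getJsonFromUrl(e.queryString);
    for (var key in mArr) {
      row.push(key == 'ct_completed' ? convTimeLong(mArr[key]) : mArr[key]);
    }
  } else {
    row = ['e undefined!'];
  }
  appendRow(sheet, row); // the locked helper instead of sheet.appendRow(row)
  return handleResponse(e);
}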
Related
I'm having some trouble with my script. Basically, it works by getting data from one course and inputting the values into one sheet. That part works perfectly.
But when one of my students enters a line break ('enter') in that course, I have trouble reading it in Excel. So I have to find and replace the line breaks in the Google spreadsheet and change them to "; ".
This works perfectly when I do it manually, but I can't do it by script.
Here is the piece:
// 1. Enter sheet name where data is to be written below
var SHEET_NAME = "DATA";
// 2. Run > setup
//
// 3. Publish > Deploy as web app
// - enter Project Version name and click 'Save New Version'
// - set security level and enable service (most likely execute as 'me' and access 'anyone, even anonymously')
//
// 4. Copy the 'Current web app URL' and post this in your form/script action
//
// 5. Insert column names on your destination sheet matching the parameter names of the data you are passing in (exactly matching case)
var SCRIPT_PROP = PropertiesService.getScriptProperties(); // new property service
// If you don't want to expose either GET or POST methods you can comment out the appropriate function
function doGet(e){
  return handleResponse(e);
}
function doPost(e){
  return handleResponse(e);
}
function handleResponse(e) {
  // shortly after my original solution Google announced the LockService[1]
  // this prevents concurrent access overwriting data
  // [1] http://googleappsdeveloper.blogspot.co.uk/2011/10/concurrency-and-google-apps-script.html
  // we want a public lock, one that locks for all invocations
  var lock = LockService.getPublicLock();
  lock.waitLock(30000); // wait 30 seconds before conceding defeat.
  try {
    // next set where we write the data - you could write to multiple/alternate destinations
    var doc = SpreadsheetApp.openById(SCRIPT_PROP.getProperty("CHANGED BY SECURITY REASON"));
    var sheet = doc.getSheetByName(SHEET_NAME);
    // we'll assume header is in row 1 but you can override with header_row in GET/POST data
    var headRow = e.parameter.header_row || 1;
    var headers = sheet.getRange(1, 1, 1, sheet.getLastColumn()).getValues()[0];
    var nextRow = sheet.getLastRow() + 1; // get next row
    var row = [];
    // loop through the header columns
    for (i in headers){
      if (headers[i] == "Timestamp"){ // special case if you include a 'Timestamp' column
        row.push(new Date());
      } else { // else use header name to get data
        row.push(e.parameter[headers[i]]);
      }
    }
    // more efficient to set values as [][] array than individually
    sheet.getRange(nextRow, 1, 1, row.length).setValues([row]);
    // return json success results
    return ContentService
      .createTextOutput(JSON.stringify({"result":"success", "row": nextRow}))
      .setMimeType(ContentService.MimeType.JSON);
  } catch(e){
    // if error return this
    return ContentService
      .createTextOutput(JSON.stringify({"result":"error", "error": e}))
      .setMimeType(ContentService.MimeType.JSON);
  } finally { //release lock
    lock.releaseLock();
  }
}
function setup() {
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  // note: the property key used here must match the key read in handleResponse()
  SCRIPT_PROP.setProperty("1Un5A61M8CJDBGDAB-Tx-lYgKYaVB2RSfn9QAQ5Q-sZs", doc.getId());
}
Where can I put:
doc.replaceText("\r\n|\n|\r", ";[[:space:]]");
so that it runs straight after the spreadsheet receives the data? I can't open the sheet after the course is done to do it manually or even run the script.
(Sorry for any language mistakes.)
Thanks so much!
Based on this related post, you can achieve this by reading in all the values in the sheet (as an array), looping over the array, replacing the values, then writing the entire array back to the sheet.
You may want to check the sample code in this thread.
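That batch approach could look roughly like this (a sketch, untested; the function name and the find/repl placeholder strings are mine, and formula cells are written back as formula strings so setValues re-creates them):

function replaceInAllCells() {
  var find = "abc";
  var repl = "xyz";
  var range = SpreadsheetApp.getActiveSheet().getDataRange();
  var values = range.getValues();     // one read for the whole sheet
  var formulas = range.getFormulas(); // so formula cells can be preserved
  for (var i = 0; i < values.length; i++) {
    for (var j = 0; j < values[i].length; j++) {
      if (formulas[i][j]) {
        values[i][j] = formulas[i][j]; // strings starting with "=" are re-applied as formulas
      } else if (typeof values[i][j] === 'string') {
        values[i][j] = values[i][j].replace(find, repl);
      }
    }
  }
  range.setValues(values); // one write back
}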
Try this:
function fandr() {
  var r = SpreadsheetApp.getActiveSheet().getDataRange();
  var rws = r.getNumRows();
  var cls = r.getNumColumns();
  var i, j, a, find, repl;
  find = "abc";
  repl = "xyz";
  for (i = 1; i <= rws; i++) {
    for (j = 1; j <= cls; j++) {
      a = r.getCell(i, j).getValue();
      if (r.getCell(i, j).getFormula()) { continue; }
      try {
        a = a.replace(find, repl);
        r.getCell(i, j).setValue(a);
      }
      catch (err) { continue; }
    }
  }
}
This version replaces find anywhere inside a string. I can put it back to replacing only when the whole string equals find, if that's better. Basically, I would replace:
try {
  a = a.replace(find, repl);
  r.getCell(i, j).setValue(a);
}
catch (err) { continue; }
with
if (a==find) { r.getCell(i, j).setValue(repl);}
I'm trying to write a little script to make my coworkers' and my lives easier. I am trying to append rows to a spreadsheet based on information entered into a custom form. The code posted below is just the doPost block, which should be appending to the Google spreadsheet.
function doPost(form) {
  var PN = form.PartNumber;
  var REV = form.Revision;
  var DATE = form.RevisionDate;
  var DESC = form.Description;
  var NOTE = form.PartNotes;
  var URL = form.myFile.getURL();
  var ss = SpreadsheetApp.openById("ID HERE"); // removed ID for sake of safety (let me be paranoid)
  var sheet = ss.getSheetName('Uploads');
  sheet.appendRow([PN, REV, DATE, DESC, NOTE, URL]);
}
I am unsure why it isn't writing to the spreadsheet, but it isn't throwing any errors either. If you can offer any insight into what is wrong, I would greatly appreciate it; there are many guides online, but most seem to be based on deprecated functions/code/etc.
Thanks for your time.
Instead of using doPost, set up an "On form submit" trigger.
You need to get the namedValues to be able to pull specific values, taking the first element of each.
Also, it should be getSheetByName('Uploads').
As pointed out in the previous answer, it is unclear what you are trying to achieve with form.myFile.getURL(). If you want the form URL, you might as well create it as a string, as it always stays the same.
Here is a working example of your code:
function doPost(form) {
  var formResponses = form.namedValues;
  var PN = formResponses.PartNumber[0];
  var REV = formResponses.Revision[0];
  var DATE = formResponses.RevisionDate[0];
  var DESC = formResponses.Description[0];
  var NOTE = formResponses.PartNotes[0];
  //var URL = form.myFile.getURL(); //Not sure what you are trying to get here, as the form URL will always be the same.
  var URL = "Your form's url"; //You can put the form url here so it will be pushed into every row.
  var ss = SpreadsheetApp.openById("ID HERE"); // removed ID for sake of safety (let me be paranoid)
  var sheet = ss.getSheetByName('Uploads');
  sheet.appendRow([PN, REV, DATE, DESC, NOTE, URL]);
}
The form fields are nested in a "parameter" property of the doPost argument.
So you should access them using:
function doPost(form) {
  var actualForm = form.parameter;
  var PN = actualForm.PartNumber;
  //etc
To double-check all the parameters you're receiving and their names, you could append everything to your sheet stringified, like this:
sheet.appendRow([JSON.stringify(form)]);
--edit
This form.myFile.getURL() also looks odd. Another good debugging trick is to wrap everything in a try-catch and email yourself any errors you get. For example:
function doPost(form) {
  try {
    //all your code
  } catch(err) {
    MailApp.sendEmail('yourself@etc', 'doPost error', err + '\n\n' + JSON.stringify(form));
  }
}
On form submit
onFormSubmit works; doPost looks wrong.
Simple example:
function Initialize() {
  var triggers = ScriptApp.getProjectTriggers();
  for (var i in triggers) {
    ScriptApp.deleteTrigger(triggers[i]);
  }
  ScriptApp.newTrigger("SendGoogleForm")
    .forSpreadsheet(SpreadsheetApp.getActiveSpreadsheet())
    .onFormSubmit()
    .create();
}

function SendGoogleForm(e) {
  try {
    // process e.namedValues here (see the full example linked below)
  } catch (err) {
    Logger.log(err);
  }
}
Full example (scroll down to the code): http://www.labnol.org/internet/google-docs-email-form/20884/ (note: the example sends an email)
Trigger docs: https://developers.google.com/apps-script/guides/triggers/events
Note: I think the problem is doPost. Does it work with Google Forms? I've never seen it used with Google Forms.
First and foremost, thank you to everyone who has responded with information thus far. None of the solutions posted here worked for my particular implementation (my implementation is probably to blame; it is very crude), but they definitely set me down the path to a working version of my form, which we now lightly use. I have posted some of the code below:
function sheetFill(form, link) {
  try {
    var formResponses = form.namedValues;
    var toForm = [0,0,0,0,0,0,0];
    for (var i = 0; i < form.PartNumber.length; i++) {
      toForm[0] = toForm[0] + form.PartNumber[i];
    }
    // ... (several for loops later)
    var d = new Date();
    var ss = SpreadsheetApp.openById("IDHERE");
    var sheet = ss.getActiveSheet(); // was: ss.getCurrentSheet, which is not an Apps Script method
    sheet.appendRow([toForm[0], toForm[1], toForm[2], toForm[3], toForm[4], toForm[5], toForm[6], link, d]);
  } catch(err) {
    MailApp.sendEmail('EMAIL', 'doPost error', err + '\n\n' + JSON.stringify(form));
  }
}
It is not very versatile, robust, or elegant, but it is a starting point.
I'm learning FRP using Bacon.js and would like to assemble data from a paginated API into a stream.
The module that uses the data has a consumption API like this:
// UI module, displays unicorns as they arrive
beautifulUnicorns.property.onValue(function(allUnicorns){
  console.log("Got " + allUnicorns.length + " Unicorns");
  // ... some real display work
});
The module that assembles the data requests sequential pages from an API and pushes onto the stream every time it gets a new data set:
// beautifulUnicorns module
var curPage = 1
var stream = new Bacon.Bus()
var property = stream.toProperty()
property.onValue(function(){}) // You have to add an empty subscriber, otherwise future onValues will not receive the initial value. https://github.com/baconjs/bacon.js/wiki/FAQ#why-isnt-my-property-updated
var allUnicorns = [] // !!! stateful list of all unicorns ever received. Is this idiomatic for FRP?
var getNextPage = function(){
  /* get data for subsequent pages.
     Skipping for clarity */
}
var gotNextPage = function (resp) {
  Array.prototype.push.apply(allUnicorns, resp) // just adds the responses to the existing array reference
  stream.push(allUnicorns)
  curPage++
  if (curPage <= pageLimit) { getNextPage() }
}
How do I subscribe to the stream in a way that gives me the full list of all unicorns ever received? Is this flatMap or something similar? I don't think I need a new stream out of it, but I don't know; I'm sorry, I'm new to the FRP way of thinking. To be clear, assembling the array works, it just feels like I'm not doing the idiomatic thing.
I'm not using jQuery or another AJAX library for this, which is why I'm not using Bacon.fromPromise.
You may also wonder why my consuming module wants the whole set instead of just the incremental update. If it were just appending rows that could be OK, but in my case it's an infinite scroll, and it should draw data only if both (1) the data is available and (2) the area is on screen.
This can be done with the .scan() method. You will also need a stream that emits the items of one page, which you can create with .repeat().
Here is a draft (sorry, not tested):
var itemsPerPage = Bacon.repeat(function(index) {
  var pageNumber = index + 1;
  if (pageNumber < PAGE_LIMIT) {
    return Bacon.fromCallback(function(callback) {
      // your method that talks to the server
      getDataForAPage(pageNumber, callback);
    });
  } else {
    return false;
  }
});

var allItems = itemsPerPage.scan([], function(allItems, itemsFromAPage) {
  return allItems.concat(itemsFromAPage);
});

// Here you go
allItems.onValue(function(allUnicorns){
  console.log("Got " + allUnicorns.length + " Unicorns");
  // ... some real display work
});
As you noticed, you also won't need the .onValue(function(){}) hack or the curPage external state.
Here is a solution using flatMap and fold. When dealing with the network you have to remember that responses can come back in a different order than the requests were sent; that's why we need the combination of fold and map.
var pages = Bacon.fromArray([1,2,3,4,5])

var requests = pages.flatMap(function(page) {
  return doAjax(page)
    .map(function(value) {
      return {
        page: page,
        value: value
      }
    })
}).log("Data received")

var allData = requests.fold([], function(arr, data) {
  return arr.concat([data])
}).map(function(arr) {
  // I would normally write this as a one-liner
  var sorted = _.sortBy(arr, "page")
  var onlyValues = _.pluck(sorted, "value")
  var inOneArray = _.flatten(onlyValues)
  return inOneArray
})

allData.log("All data")

function doAjax(page) {
  // This would actually be Bacon.fromPromise($.ajax...)
  // Math.random simulates the fact that requests can return out of order
  return Bacon.later(Math.random() * 3000, [
    "Page" + page + "Item1",
    "Page" + page + "Item2"])
}
http://jsbin.com/damevu/4/edit
Sorry if this is an elementary question, but I just can't get this to work the way I need.
I have a script that essentially consists of 3 parts:
1) Removes all protection from a sheet.
2) Executes some copying functions (since the ranges are protected, I need to remove the protection first, see #1).
3) Sets the protection back up after #2 is finished.
Here's my code:
First, clears protection:
var ss = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('COST REPORT');
var protections = ss.getProtections(SpreadsheetApp.ProtectionType.RANGE);
for (var i = 0; i < protections.length; i++) {
  var protection = protections[i];
  if (protection.canEdit()) {
    protection.remove();
  }
}
Second, clears data in cells:
var costReport = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('COST REPORT');
costReport.getRange('F12:F16').clearContent(); //Theoreticals
costReport.getRange('D20:D20').clearContent(); //Week Ending Date
Third, sets protection:
var ss = SpreadsheetApp.getActive().getSheetByName('COST REPORT');
var costReportCOGS = ss.getRange('G11:G16');
var protection = costReportCOGS.protect().setDescription('costReportCOGS');
var me = Session.getEffectiveUser();
protection.addEditor(me);
protection.removeEditors(protection.getEditors());
if (protection.canDomainEdit()) {
  protection.setDomainEdit(false);
}

var costReportPurchaseEnding = ss.getRange('D11:E16');
var protection = costReportPurchaseEnding.protect().setDescription('costReportPurchaseEnding');
var me = Session.getEffectiveUser();
protection.addEditor(me);
protection.removeEditors(protection.getEditors());
if (protection.canDomainEdit()) {
  protection.setDomainEdit(false);
}
I've cut some of the script down for ease of debugging, but basically I need the script to execute and finish in this order, step by step. If I just run the script the way it is, the protection doesn't get removed and I get the error "trying to edit protected range...".
If I run each block by itself it works perfectly, but that means three different scripts the user has to run, and I need it all in one.
Thanks in advance!
Sean.
Something like this?
function removeProtection() {
  var ss = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('COST REPORT');
  var protections = ss.getProtections(SpreadsheetApp.ProtectionType.RANGE);
  for (var i = 0; i < protections.length; i++) {
    var protection = protections[i];
    if (protection.canEdit()) {
      protection.remove();
    }
  }
}

function clearRangeData() {
  var costReport = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('COST REPORT');
  costReport.getRange('F12:F16').clearContent(); //Theoreticals
  costReport.getRange('D20:D20').clearContent(); //Week Ending Date
}

function weeklyFileRangeProtection() {
  //COST REPORT
  var ss = SpreadsheetApp.getActive().getSheetByName('COST REPORT');
  var costReportCOGS = ss.getRange('G11:G16');
  var protection = costReportCOGS.protect().setDescription('costReportCOGS');
  var me = Session.getEffectiveUser();
  protection.addEditor(me);
  protection.removeEditors(protection.getEditors());
  if (protection.canDomainEdit()) {
    protection.setDomainEdit(false);
  }
}
You are having issues because each function calls SpreadsheetApp.getActiveSpreadsheet. Each time you make this call you create a virtual "copy" of the spreadsheet, and the changes you make to this copy are only pushed to the version on Google's servers once the whole script has finished. Hence, if you manually run each of the 3 functions, the workflow is:
Run function 1 -> script finishes -> spreadsheet on the server is updated -> run function 2 (which now gets the updated spreadsheet) -> script finishes -> server is updated -> run function 3 (which now gets the re-updated spreadsheet) -> script finishes -> server is updated.
Now, if you run the three functions together the way the script currently is, here is what happens:
var ss = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('COST REPORT'); creates a virtual copy of the spreadsheet -> your code removes the protection from this copy, and the server spreadsheet is not modified -> you call var costReport = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('COST REPORT'); again, which creates a new copy of the server spreadsheet, which hasn't had its protections removed yet -> your code tries to clear the data on this copy, which triggers the error.
As @Cameron Roberts suggested in his answer, calling SpreadsheetApp.flush() between the steps will solve the issue, because it forces the changes to be synced to the spreadsheet on the server. But you will have another "problem", which is the number of copies you are creating; .getActiveSpreadsheet() is very time-consuming! It is better to make only one call, store it in a variable (you already do that; it is your variable ss), and make all the edits to that.
Your code will end up looking like this:
var ss = SpreadsheetApp.getActiveSpreadsheet();
var costReport = ss.getSheetByName('COST REPORT');

//First clear protection
var protections = costReport.getProtections(SpreadsheetApp.ProtectionType.RANGE);
for (var i = 0; i < protections.length; i++) {
  var protection = protections[i];
  if (protection.canEdit()) {
    protection.remove();
  }
}

//Second clear data in cells
costReport.getRange('F12:F16').clearContent(); //Theoreticals
costReport.getRange('D20:D20').clearContent(); //Week Ending Date

//Third set protection
var costReportCOGS = costReport.getRange('G11:G16');
var protection = costReportCOGS.protect().setDescription('costReportCOGS');
var me = Session.getEffectiveUser();
protection.addEditor(me);
protection.removeEditors(protection.getEditors());
if (protection.canDomainEdit()) {
  protection.setDomainEdit(false);
}

var costReportPurchaseEnding = costReport.getRange('D11:E16');
var protection = costReportPurchaseEnding.protect().setDescription('costReportPurchaseEnding');
var me = Session.getEffectiveUser();
protection.addEditor(me);
protection.removeEditors(protection.getEditors());
if (protection.canDomainEdit()) {
  protection.setDomainEdit(false);
}
This method also applies to Google Docs, which does NOT have a similar .flush() method for updating the server version.
I believe you have slightly misdiagnosed the issue. The code is already running in the correct order, but the protection is simply not being removed before the write calls are executed, due to the nature of Google's underlying architecture.
The comments steering you towards asynchronous behaviour are not helpful in this case; they make sense from a JavaScript perspective but are not the issue here. This is an Apps Script / Google Sheets issue, and none of the functions you are calling are asynchronous.
I have two suggestions: one is to try calling SpreadsheetApp.flush() after the protections are removed; the other is to use Utilities.sleep() to artificially pause the script for a brief period after executing the remove() calls.
https://developers.google.com/apps-script/reference/spreadsheet/spreadsheet-app#flush()
https://developers.google.com/apps-script/reference/utilities/utilities#sleep(Integer)
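For example, a minimal orchestrating function (a sketch, assuming the three blocks are kept as the separate functions shown in the first answer) could look like this:

function runAll() {
  removeProtection();
  SpreadsheetApp.flush(); // push the pending protection removals to the server
  clearRangeData();
  SpreadsheetApp.flush(); // push the cleared cells before re-protecting
  weeklyFileRangeProtection();
}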
So I'm using node.js and the module instagram-node-lib to download metadata for Instagram posts. I have a couple of hashtags that I want to search for, and I want to download all existing posts (handling request failures during pagination) as well as monitor all new posts.
I have managed to crack the first part: downloading all existing posts and handling failures (I noticed that sometimes the Instagram API would just fail on me, so I've added redundancy that remembers the last successful page downloaded and tries again from that point). For anyone who is interested, here is my code (note: I use Postgres to save the posts, and I've abbreviated/obfuscated some of the code for ease of reading and for commercial reasons). Apologies for the length of the code, but I think it will come in useful to someone:
var db = new (require('./postgres'))
  , api = require("instagram-node-lib")
  ;

var HASHTAGS = ["fluffy", "kittens"] //this is just an example!
  , CLIENT_ID = "YOUR_CLIENT_ID"
  , CLIENT_SECRET = "YOUR_CLIENT_SECRET"
  , HOST = "https://api.instagram.com"
  , PORT = 443
  , PATH = "/v1/media/popular?client_id=" + CLIENT_ID
  ;

var hashtagIndex = 0
  , settings
  ;

/**
 * Initialise the module for use
 */
exports.initialise = function(){
  api.set("client_id", CLIENT_ID);
  api.set("client_secret", CLIENT_SECRET);
  if( !settings){
    settings = {
      hashtags: []
    }
    for( var i in HASHTAGS){
      settings.hashtags[i] = {
        name: HASHTAGS[i],
        maxTagId: null,
        minTagId: null,
        nextMaxTagId: null,
      }
    }
  }
  // console.log(settings);
  db.initialiseSettings(); //I haven't included the code for this - basically it just loads settings from the database, overwriting the defaults above if they exist, otherwise creating them from the above object. I store the settings as a JSON object in the DB and parse them on load
  execute();
}

function execute(){
  var params = {
    name: HASHTAGS[hashtagIndex],
    complete: function(data, pagination){
      var hashtag = settings.hashtags[hashtagIndex];
      //from scratch
      if( !hashtag.maxTagId){
        console.log('Downloading old posts from scratch');
        getOldPosts();
      }
      //still loading old (previously failed)
      else if( hashtag.nextMaxTagId){
        console.log('Downloading old posts from last saved position');
        getOldPosts(hashtag.nextMaxTagId);
      }
      //new posts only
      else {
        console.log('Downloading new posts only');
        getNewPosts(hashtag.minTagId);
      }
    },
    error: function(msg, obj, caller){
      apiError(msg, obj, caller);
    }
  }
  api.tags.info(params);
}

function getOldPosts(maxTagId){
  console.log();
  var params = {
    name: HASHTAGS[hashtagIndex],
    count: 100,
    max_tag_id: maxTagId || undefined,
    complete: function(data, pagination){
      console.log(pagination);
      var hashtag = settings.hashtags[hashtagIndex];
      //reached the end
      if( pagination.next_max_tag_id == hashtag.maxTagId){
        console.log('Downloaded all posts for #' + HASHTAGS[hashtagIndex]);
        hashtag.nextMaxTagId = null; //reset nextMaxTagId - that way next time we execute the script we know to just look for new posts
        saveSettings(function(){
          next();
        }); //Another function I haven't included - just saves the settings object, overwriting what is in the database. Once saved, executes the next() function
      }
      else {
        //from scratch
        if( !hashtag.maxTagId){
          //these values will be saved once all posts in this batch have been saved. We set these only once, meaning that we have a baseline to compare to - enabling us to determine if we have reached the end of pagination
          hashtag.maxTagId = pagination.next_max_tag_id;
          hashtag.minTagId = pagination.min_tag_id;
        }
        //if there is a failure then we know where to start from - this is only saved to the database once the posts are successfully saved to the database
        hashtag.nextMaxTagId = pagination.next_max_tag_id;
        //again, another function not included. Saves the posts to the database, then updates the settings. Once they have completed we get the next page of data
        db.savePosts(data, function(){
          saveSettings(function(){
            getOldPosts(hashtag.nextMaxTagId);
          });
        });
      }
    },
    error: function(msg, obj, caller){
      apiError(msg, obj, caller);
      //keep calm and try again - this is our failure redundancy
      execute();
    }
  }
  var posts = api.tags.recent(params);
}

/**
 * Still to be completed!
 */
function getNewPosts(minTagId){
}

function next(){
  if( hashtagIndex < HASHTAGS.length - 1){
    console.log("Moving onto the next hashtag...");
    hashtagIndex++;
    execute();
  }
  else {
    console.log("All hashtags processed...");
  }
}
OK, so here is my dilemma about solving the next piece of the puzzle: downloading new posts (in other words, only those posts that have come into existence since I last downloaded all the posts). Should I use Instagram subscriptions, or is there a way to implement paging similar to what I've already used? I'm worried that if I use the former and there is a problem with my server that takes it down for a period of time, I will miss some posts. I'm worried that if I use the latter it might not be possible to page through the records: is the Instagram API set up to allow only forward paging rather than backward paging?
I've attempted to post questions in the Google Instagram API Developers Group a couple of times, and none of my messages seem to be appearing in the forum, so I thought I'd resort to trusty Stack Overflow.