I'm having some trouble with my script. Basically, it takes data from one course and writes the values into one sheet, and that part works perfectly.
But when one of my students presses 'enter' in the course, I have trouble reading the result in Excel. So I have to find and replace the line break in the Google Spreadsheet and change it to "; ".
It works perfectly when I do it manually, but I can't do it by script.
Here is the piece:
// 1. Enter sheet name where data is to be written below
var SHEET_NAME = "DATA";
// 2. Run > setup
//
// 3. Publish > Deploy as web app
// - enter Project Version name and click 'Save New Version'
// - set security level and enable service (most likely execute as 'me' and access 'anyone, even anonymously')
//
// 4. Copy the 'Current web app URL' and post this in your form/script action
//
// 5. Insert column names on your destination sheet matching the parameter names of the data you are passing in (exactly matching case)
var SCRIPT_PROP = PropertiesService.getScriptProperties(); // new property service
// If you don't want to expose either GET or POST methods you can comment out the appropriate function
function doGet(e){
  return handleResponse(e);
}

function doPost(e){
  return handleResponse(e);
}

function handleResponse(e) {
  // shortly after my original solution Google announced the LockService[1]
  // this prevents concurrent access overwriting data
  // [1] http://googleappsdeveloper.blogspot.co.uk/2011/10/concurrency-and-google-apps-script.html
  // we want a public lock, one that locks for all invocations
  var lock = LockService.getPublicLock();
  lock.waitLock(30000); // wait 30 seconds before conceding defeat.

  try {
    // next set where we write the data - you could write to multiple/alternate destinations
    var doc = SpreadsheetApp.openById(SCRIPT_PROP.getProperty("CHANGED BY SECURITY REASON"));
    var sheet = doc.getSheetByName(SHEET_NAME);

    // we'll assume header is in row 1 but you can override with header_row in GET/POST data
    var headRow = e.parameter.header_row || 1;
    var headers = sheet.getRange(1, 1, 1, sheet.getLastColumn()).getValues()[0];
    var nextRow = sheet.getLastRow() + 1; // get next row
    var row = [];
    // loop through the header columns
    for (i in headers){
      if (headers[i] == "Timestamp"){ // special case if you include a 'Timestamp' column
        row.push(new Date());
      } else { // else use header name to get data
        row.push(e.parameter[headers[i]]);
      }
    }
    // more efficient to set values as [][] array than individually
    sheet.getRange(nextRow, 1, 1, row.length).setValues([row]);
    // return json success results
    return ContentService
      .createTextOutput(JSON.stringify({"result":"success", "row": nextRow}))
      .setMimeType(ContentService.MimeType.JSON);
  } catch(e){
    // if error return this
    return ContentService
      .createTextOutput(JSON.stringify({"result":"error", "error": e}))
      .setMimeType(ContentService.MimeType.JSON);
  } finally { // release lock
    lock.releaseLock();
  }
}

function setup() {
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  SCRIPT_PROP.setProperty("1Un5A61M8CJDBGDAB-Tx-lYgKYaVB2RSfn9QAQ5Q-sZs", doc.getId());
}
Where can I input:
doc.replaceText("\r\n|\n|\r",";[[:space:]]");
so that it runs straight after the spreadsheet receives the data? I can't open the sheet after the course is done to do it manually or even run the script.
(Sorry about any language mistakes.)
Thanks so much!!!!!!!!!!!!!!
Based on this related post, you can achieve this by reading in all the values in the sheet (as an array), looping over the array, replacing the values, then writing the entire array back to the sheet.
You may want to check the sample code in this thread.
Try this:
function fandr() {
  var r = SpreadsheetApp.getActiveSheet().getDataRange();
  var rws = r.getNumRows();
  var cls = r.getNumColumns();
  var i, j, a, find, repl;
  find = "abc";
  repl = "xyz";
  for (i = 1; i <= rws; i++) {
    for (j = 1; j <= cls; j++) {
      a = r.getCell(i, j).getValue();
      if (r.getCell(i, j).getFormula()) { continue; }
      try {
        a = a.replace(find, repl);
        r.getCell(i, j).setValue(a);
      }
      catch (err) { continue; }
    }
  }
}
This time it will replace find anywhere within a string. I can put it back to only replace when the whole string equals find, if that's better. Basically, I replace:
try {
  a = a.replace(find, repl);
  r.getCell(i, j).setValue(a);
}
catch (err) { continue; }
with
if (a==find) { r.getCell(i, j).setValue(repl);}
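Alternatively, since the original goal is to clean the data as it arrives, the values can be sanitised inside handleResponse itself before they are written, so no later find-and-replace pass is needed. A minimal sketch, assuming the web app script from the question; the sanitizeValue helper is a name I introduced, not part of the original code:
// Hypothetical helper: collapse any kind of line break into "; "
function sanitizeValue(value) {
  if (typeof value !== "string") return value;
  return value.replace(/\r\n|\n|\r/g, "; ");
}

// In handleResponse, push the sanitised value instead of the raw parameter:
//   row.push(sanitizeValue(e.parameter[headers[i]]));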
I am working on a Microsoft Excel add-in in React. I want to assign colors to specific cells based on the value in them, i.e. by color code (see the Board Color column in the image below). So what I did is: get a column of the table, iterate through that column's cells and load the "values" for each cell, then iterate over the array of those values and try to fill the respective color into each cell.
During iteration the values are printed to the console correctly, but the colors are not being filled, and it has become a headache for me. It only happens when I assign the color in/after "await this.context.sync()". Before that, colors are filled (checked with dummy cells).
Below is the code:
// ExcelAPI.js class
async setBoardColor() {
  console.log("setBoardColor()");
  try {
    let workItemIDColumn = this.workitemTable.columns.getItem("BOARD COLOR").getDataBodyRange().load("values"); // .load(["values", "address"]);
    // let workItemIDColumn = await range.load(["values", "address"]);
    console.log("Workitem IDs 00 : ", workItemIDColumn);
    // console.log("Workitem IDs count : ", workItemIDColumn.values.length);
    await this.context.sync().then(() => {
      let accountHeaderRange = this.currentWorksheet.getRange("A2:B2");
      accountHeaderRange.format.fill.color = "red";
      for (var row = 0; row < workItemIDColumn.values.length; row++) {
        console.log("in loop : ", row);
        console.log("values : " + workItemIDColumn.values[row][0]);
        // workItemIDColumn.getRow(row).getRange("A2:B2").format.fill.color = "red";
        if (workItemIDColumn.values[row][0] != "") {
          console.log("I am in");
          workItemIDColumn.getRow(row).getResizedRange(0, -2).format.fill.color = "red"; // workItemIDColumn.values[row][0];
          // workItemIDColumn.getRow(row).format.protection.locked = false;
        }
      }
    });
    // console.log("Workitem IDs check : ");
  } catch (error) {
    console.log("setBoardColor() ", error);
  }
}
The above method is called from the main class component.
renderExcelContent = async (workItems) => {
  try {
    await Excel.run(async (context) => {
      const currentWorksheet = context.workbook.worksheets.getActiveWorksheet();
      let excelAPI = new ExcelAPI(context, currentWorksheet);
      excelAPI.setBoardColor();
      currentWorksheet.getRange("A2:B2").format.protection.locked = false;
      currentWorksheet.protection.protect();
      // eslint-disable-next-line no-undef
      if (Office.context.requirements.isSetSupported("ExcelApi", "1.2")) {
        currentWorksheet.getUsedRange().format.autofitColumns();
        currentWorksheet.getUsedRange().format.autofitRows();
      }
      currentWorksheet.activate();
      context.runtime.enableEvents = true;
    });
  } catch (error) {
    console.error(error);
  }
};
That's just how the Excel JavaScript API works. The things you're doing before calling sync are just interacting with JavaScript objects that aren't actually in the spreadsheet. It's the sync call that writes the changes you've made to the actual worksheet. So if you want to see changes, you have to use sync.
From the documentation (my emphasis):
Excel.RequestContext class
The RequestContext object facilitates requests to the Excel application. Since the Office add-in and the Excel application run in two different processes, the request context is required to get access to the Excel object model from the add-in.
and from sync:
Synchronizes the state between JavaScript proxy objects and the Office document, by executing instructions queued on the request context and retrieving properties of loaded Office objects for use in your code.
(Note that an implicit sync happens after your Excel.run callback returns.)
I have fixed this by adding "await" before calling setBoardColor() in the main class.
await excelAPI.setBoardColor();
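For completeness, here is a minimal sketch of the read-then-write pattern the answer describes: queue a load, sync once so the proxy objects hold real values, queue the formatting, then sync again. The range address, the red fill, and the function name are placeholders of my own, not taken from the original add-in.
async function colorCellsByValue() {
  await Excel.run(async (context) => {
    const sheet = context.workbook.worksheets.getActiveWorksheet();
    // Queue a read of some example cells.
    const range = sheet.getRange("C2:C10");
    range.load("values");
    // First sync: executes the queued read, so range.values is now populated.
    await context.sync();
    // Queue formatting writes based on the values we just read.
    for (let i = 0; i < range.values.length; i++) {
      if (range.values[i][0] !== "") {
        // Color the first cell of the matching row (illustrative only).
        sheet.getCell(i + 1, 0).format.fill.color = "red";
      }
    }
    // Second sync: pushes the queued formatting into the workbook.
    await context.sync();
  });
}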
The script must log postback information about call details from zvonok.com to Google Spreadsheets. I have written a function which only appends a row to the spreadsheet - there is no update or modification of any cell in the code - and during a few manual test calls the rows were appended correctly. But when my client began his usual call campaign, with calls and postbacks coming in very quickly one after another, the values in the last row began changing several times, and in some cases strange values were left behind.
This is the first time I have seen behavior like this, so I made a short video recording:
https://youtu.be/0_H_mVAbp4g
Here is one column with strange values:
2103052006092385
2,10305E+15
210305412464544
I have found 9 such cases out of 248 rows.
The client has shown me an Excel export from his user cabinet: in total 5649 calls were made, so there should be 5649 rows in the Google Spreadsheet instead of 248.
function getJsonFromUrl(url) {
  var query = url;
  var result = {};
  if (query == undefined){
    return result;
  }
  query.split("&").forEach(function(part) {
    var item = part.split("=");
    result[item[0]] = decodeURIComponent(item[1]);
  });
  return result;
}

function doGet(e){
  const ctCompl = 'ct_completed';
  var doc = SpreadsheetApp.openById(SHEET_KEY);
  var sheet = doc.getSheetByName(SHEET_NAME);
  var row = [];
  if(typeof e !== undefined){
    mArr = getJsonFromUrl(e.queryString);
    for (i in mArr) if( i == ctCompl) {
      row.push(convTimeLong(mArr[i]));
    } else
      row.push(mArr[i]);
    sheet.appendRow(row);
  } else {
    sheet.appendRow(['e undefined!']);
  }
  SpreadsheetApp.flush();
  return handleResponse(e)
}

function convTimeLong(dateTime) {
  let d = new Date();
  let dt = dateTime.replace('+', 'T');
  try {
    var res = Utilities.formatDate(d, "GMT+2", "dd.MM.yyyy HH:mm");
    return res
  } catch(e){
    return dateTime;
  }
}
The executions dashboard shows status "completed" everywhere; the longest execution time is 1.688 s.
The client has set a 5 second delay between calls. Right now I don't know whether the percentage of lost postbacks decreased after the delay was set, but it is still very high.
In general, using Google Sheets as a database is a bad idea. It's not designed for this, so it can fail really badly. Using a proper database will make everything much, much easier. If you are using the spreadsheet to then cook the data, I'd advise using a function that imports data, like IMPORTXML (see reference).
That being said, if you insist on using Sheets, you could try using locks:
function appendRow(sheet, row) {
  const lock = LockService.getScriptLock()
  while (!lock.tryLock(100000)) /* Spin the lock until it gets acquired */;
  try {
    sheet.appendRow(row)
    SpreadsheetApp.flush()
  } finally {
    lock.releaseLock()
  }
}
To use it, you only need to pass the sheet and the values to add: change sheet.appendRow(row) to appendRow(sheet, row).
It will make sure that entries don't get overwritten. Note that this will slow down the code a lot, and the script can time out if there are a lot of requests.
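As an illustration of that change, a sketch of how the doGet from the question might call the lock-protected wrapper. This is not the original code verbatim: the typeof check is tightened here, the flush is now handled inside appendRow, and the final return is a plain text output because handleResponse is not shown in the question.
function doGet(e){
  const ctCompl = 'ct_completed';
  var doc = SpreadsheetApp.openById(SHEET_KEY);
  var sheet = doc.getSheetByName(SHEET_NAME);
  var row = [];
  if (typeof e !== 'undefined') {
    var mArr = getJsonFromUrl(e.queryString);
    for (var i in mArr) {
      row.push(i == ctCompl ? convTimeLong(mArr[i]) : mArr[i]);
    }
    // Use the lock-protected wrapper instead of calling sheet.appendRow directly.
    appendRow(sheet, row);
  } else {
    appendRow(sheet, ['e undefined!']);
  }
  return ContentService.createTextOutput('ok');
}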
I am writing a modest Firefox add-on and I have some problems getting the results used inside the "flow" of the add-on script.
The code taking care of querying a SQLite database lives in a module, but I don't know how to create a callback inside of it so that the pagemod in the add-on script can use it and pass it to the content script.
Basically here is what I have:
main.js :
var pageMod = require("sdk/page-mod");
var self = require("sdk/self");
var myDbScript = require('./myDbScript');
pageMod.PageMod({
  include: "*.example.com/*",
  contentScriptFile: [self.data.url('jquery-1.10.2.min.js'),
                      self.data.url('myContentScript.js')],
  onAttach: function(worker) {
    // Query the database on behalf of the content script
    worker.port.on('queryTheDB', function(message) {
      // Get the data from the DB (2 is some test value here)
      // Not working because of the asynchronous querying of the DB
      var resultFromDB = myDbScript.getResult(2);
      // Send the result to the content script
      worker.port.emit('hereIsYourResult', resultFromDB);
    });
  }
});
myDBScript.js
// Get required components
var {components} = require("chrome");
components.utils.import("resource://gre/modules/FileUtils.jsm");
components.utils.import("resource://gre/modules/Services.jsm");
// Some code to get the DB
// Create statement to retrieve country based on the IP
var statement = dbConnection.createStatement("SELECT col1, col2 FROM table WHERE col1 = :given_col1");
function getResult(submittedValue) {
  // Bind parameters
  statement.params.given_col1 = submittedValue;
  // Execute
  statement.executeAsync({
    handleResult: function(aResultSet) {
      for (let row = aResultSet.getNextRow();
           row;
           row = aResultSet.getNextRow()) {
        var resultFromDB = row.getResultByName("col2");
      }
    },
    handleError: function(aError) {
      print("Error: " + aError.message);
      return 'error';
    },
    handleCompletion: function(aReason) {
      if (aReason != components.interfaces.mozIStorageStatementCallback.REASON_FINISHED) {
        print("Query canceled or aborted!");
        return 'canceledOrAborted';
      } else {
        // Sending the result to the add-on script so that it can
        // pass it to the content script
        notifyingTheAddonScript(resultFromDB);
      }
    }
  });
}
// Enable the use of the getResult function
exports.getResult = getResult;
The thing is that I don't see how to have the addon script be aware that the result is ready. Please bear with me, I am a noob at this...
Since I don't have the full source, I cannot test, so you'll have to fix any errors I made yourself ;)
First, let's add a callback.
// @param {function(result, error)} callback
//        Called upon query completion.
//        If |error| is a string, then the query failed.
//        Else |result| will contain an array of values.
function getResult(submittedValue, callback) { // changed
  // Bind parameters
  statement.params.given_col1 = submittedValue;

  var rv = [], err = null; // added

  // Execute
  statement.executeAsync({
    handleResult: function(aResultSet) {
      for (let row = aResultSet.getNextRow();
           row;
           row = aResultSet.getNextRow()) {
        rv.push(row.getResultByName("col2")); // changed
      }
    },
    handleError: function(aError) {
      print("Error: " + aError.message);
      err = aError.message; // changed
    },
    handleCompletion: function(aReason) {
      if (aReason != components.interfaces.mozIStorageStatementCallback.REASON_FINISHED) {
        print("Query canceled or aborted!");
        err = err || 'canceled or aborted'; // changed
      }
      callback(err ? null : rv, err); // replaced
    }
  });
}
Let's use this stuff now in the pagemod:
onAttach: function(worker) {
  // Query the database on behalf of the content script
  worker.port.on('queryTheDB', function(message) {
    // Get the data from the DB (2 is some test value here)
    myDbScript.getResult(2, function callback(result, error) {
      if (error) {
        worker.port.emit("hereIsYourError", error);
        return;
      }
      worker.port.emit("hereIsYourResult", result);
    });
  });
}
You might want to take some precautions not to fire multiple queries. While it would be OK to do so, it might hurt performance ;)
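For instance, a simple in-flight guard (purely illustrative, inside onAttach) would prevent overlapping queries:
// One possible precaution: ignore new requests while a query is in flight.
var queryInFlight = false;
worker.port.on('queryTheDB', function(message) {
  if (queryInFlight) return; // drop the request, or queue it, as you prefer
  queryInFlight = true;
  myDbScript.getResult(2, function (result, error) {
    queryInFlight = false;
    if (error) { worker.port.emit("hereIsYourError", error); return; }
    worker.port.emit("hereIsYourResult", result);
  });
});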
Since our callback already looks kinda like a promise, it might actually be a good idea to use promises, maybe even with the Sqlite.jsm module and some Task.jsm magic.
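A rough sketch of what that promise-based variant could look like, using Sqlite.jsm together with Task.jsm inside an SDK module. The database path, table name, and column names below are placeholders of mine; adapt them to the real schema.
var { Cu } = require("chrome");
const { Sqlite } = Cu.import("resource://gre/modules/Sqlite.jsm", {});
const { Task } = Cu.import("resource://gre/modules/Task.jsm", {});

// Returns a promise that resolves to an array of col2 values.
function getResult(submittedValue) {
  return Task.spawn(function* () {
    // Open a connection; the path here is a placeholder.
    let conn = yield Sqlite.openConnection({ path: "myDatabase.sqlite" });
    try {
      let rows = yield conn.execute(
        "SELECT col1, col2 FROM my_table WHERE col1 = :given_col1",
        { given_col1: submittedValue });
      return rows.map(function (row) {
        return row.getResultByName("col2");
      });
    } finally {
      yield conn.close();
    }
  });
}
exports.getResult = getResult;

// In the pagemod:
//   getResult(2).then(function (result) { worker.port.emit("hereIsYourResult", result); },
//                     function (error) { worker.port.emit("hereIsYourError", String(error)); });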
In my Win 8 app, based on a blank template, I have successfully added the search contract and it seems to work despite the fact that I have not linked it to any data yet; so, for now, when I search any term in my app it simply takes me to the searchResults page with the message "No Results Found", which is what I was expecting initially.
Now what I wish to do is link my database into the searchResults.js file so that I can query my database. Outside of the search contract I have tested and connected my DB and it works; I did this using WinJS.xhr to connect to my web service, which in turn queries my database and returns a JSON object.
In my test I only hardcoded the URL; however, I now need to do two things: first, move the test WinJS.xhr code for connecting to my DB into the search contract code, and second, change the hardcoded URL to a dynamic URL that accepts the user's search term.
From what I understand of Win 8 search so far, the actual data-querying part of the search contract is as follows:
// This function populates a WinJS.Binding.List with search results for the provided query.
_searchData: function (queryText) {
    var originalResults;
    // TODO: Perform the appropriate search on your data.
    if (window.Data) {
        originalResults = Data.items.createFiltered(function (item) {
            return (item.termName.indexOf(queryText) >= 0 || item.termID.indexOf(queryText) >= 0 || item.definition.indexOf(queryText) >= 0);
        });
    } else {
        originalResults = new WinJS.Binding.List();
    }
    return originalResults;
}
});
The code that I need to transfer into this section is below. Now, I have to admit I do not currently understand the code block above and have not found a good resource for breaking it down line by line; if someone can help with that it will be truly awesome! In my code below, I basically want to integrate it and then make searchString equal to the user's search term.
var testTerm = document.getElementById("definition");
var testDef = document.getElementById("description");
var searchString = 2;
var searchFormat = 'JSON';
var searchurl = 'http://www.xxx.com/web-service.php?termID=' + searchString + '&format=' + searchFormat;

WinJS.xhr({ url: searchurl })
    .done(function fulfilled(result) {
        // Show Terms
        var searchTerm = JSON.parse(result.responseText);
        // var terms is the key of the object (terms); on each iteration of the loop the var terms is assigned the name of the object key
        // and the if statement is evaluated
        for (terms in searchTerm) {
            // terms will find key "terms"
            var termName = searchTerm.terms[0].term.termName;
            var termdefinition = searchTerm.terms[0].term.definition;
            // WinJS.Binding.processAll(termDef, termdefinition);
            testTerm.innerText = termName;
            testDef.innerText = termdefinition;
        }
    },
    function error(result) {
        testDef.innerHTML = "Got Error: " + result.statusText;
    },
    function progress(result) {
        testDef.innerText = "Ready state is " + result.readyState;
    });
I will try to provide some explanation for the snippet that you didn't quite understand. I believe the code you have above comes from the default code added by Visual Studio. Please see the explanation as inline comments.
/**
 * This function populates a WinJS.Binding.List with search results
 * for the provided query by applying a filter on the data source.
 * @param {String} queryText - the search query acquired from the Search charm
 * @return {WinJS.Binding.List} the filtered result of your search query.
 */
_searchData: function (queryText) {
    var originalResults;
    // window.Data is the data source of the ListView
    // window.Data is an object defined in YourProject/js/data.js
    // at line 16: WinJS.Namespace.define("Data" ...
    // Data.items is an array that's being grouped by functions in data.js
    if (window.Data) {
        // apply a filter to filter the data source
        // if you have your own search algorithm,
        // you should replace the code below with your code
        originalResults = Data.items.createFiltered(function (item) {
            return (item.termName.indexOf(queryText) >= 0 ||
                    item.termID.indexOf(queryText) >= 0 ||
                    item.definition.indexOf(queryText) >= 0);
        });
    } else {
        // if there is no data source, then we return an empty WinJS.Binding.List
        // such that the view can be populated with 0 results
        originalResults = new WinJS.Binding.List();
    }
    return originalResults;
}
Since you are thinking about doing the search through your own web service, you can always make your _searchData function async and have your view wait on the search result being returned from your web service.
_searchData: function (queryText) {
    var dfd = new $.Deferred();
    // make a xhr call to your service with queryText
    WinJS.xhr({
        url: your_service_url,
        data: queryText.toLowerCase()
    }).done(function (response) {
        var result = parseResultArrayFromResponse(response);
        var resultBindingList = WinJS.Binding.List(result);
        dfd.resolve(result)
    }).fail(function (response) {
        var error = parseErrorFromResponse(response);
        var emptyResult = WinJS.Binding.List();
        dfd.reject(emptyResult, error);
    });
    return dfd.promise();
}
...
// whoever calls searchData would need to asynchronously deal with the service response.
_searchData(queryText).done(function (resultBindingList) {
    // TODO: Display the result with resultBindingList by binding the data to view
}).fail(function (resultBindingList, error) {
    // TODO: proper error handling
});
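To cover the second part of the question (making the hardcoded URL dynamic), here is a small sketch of how the URL from the test snippet could be built from the user's query text. The parameter names come from the test code in the question and may need adjusting to match the real web service; buildSearchUrl is a name I introduced.
// Build the web-service URL from the user's search term.
function buildSearchUrl(queryText) {
    var searchFormat = 'JSON';
    return 'http://www.xxx.com/web-service.php?termID=' +
        encodeURIComponent(queryText) +
        '&format=' + searchFormat;
}

// Inside _searchData, the xhr call would then become:
//   WinJS.xhr({ url: buildSearchUrl(queryText) }).done(...)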
So I'm using node.js and the module instagram-node-lib to download metadata for Instagram posts. I have a couple of hashtags that I want to search for, and I want to download all existing posts (handling request failure during pagination) as well as monitor all new posts.
I have managed to crack the first part - downloading all existing posts and handling failure (I noticed that sometimes the Instagram API would just fail on me, so I've added redundancy to remember the last successful page I downloaded and attempt again from that point). For anyone who is interested, here is my code (note: I use Postgres to save the posts, and I've abbreviated/obfuscated some of the code for ease of reading and for commercial purposes). Apologies for the length of the code, but I think it will come in useful to someone:
var db = new (require('./postgres'))
  , api = require("instagram-node-lib")
  ;

var HASHTAGS = ["fluffy", "kittens"] //this is just an example!
  , CLIENT_ID = "YOUR_CLIENT_ID"
  , CLIENT_SECRET = "YOUR_CLIENT_SECRET"
  , HOST = "https://api.instagram.com"
  , PORT = 443
  , PATH = "/v1/media/popular?client_id=" + CLIENT_ID
  ;

var hashtagIndex = 0
  , settings
  ;

/**
 * Initialise the module for use
 */
exports.initialise = function(){
  api.set("client_id", CLIENT_ID);
  api.set("client_secret", CLIENT_SECRET);

  if( !settings){
    settings = {
      hashtags: []
    }
    for( var i in HASHTAGS){
      settings.hashtags[i] = {
        name: HASHTAGS[i],
        maxTagId: null,
        minTagId: null,
        nextMaxTagId: null,
      }
    }
  }
  // console.log(settings);
  db.initialiseSettings(); //I haven't included the code for this - basically just loads settings from the database, overwriting the defaults above if they exist, otherwise it creates them using the above object. I store the settings as a JSON object in the DB and parse them on load

  execute();
}

function execute(){
  var params = {
    name: HASHTAGS[hashtagIndex],
    complete: function(data, pagination){
      var hashtag = settings.hashtags[hashtagIndex];
      //from scratch
      if( !hashtag.maxTagId){
        console.log('Downloading old posts from scratch');
        getOldPosts();
      }
      //still loading old (previously failed)
      else if( hashtag.nextMaxTagId){
        console.log('Downloading old posts from last saved position');
        getOldPosts(hashtag.nextMaxTagId);
      }
      //new posts only
      else {
        console.log('Downloading new posts only');
        getNewPosts(hashtag.minTagId);
      }
    },
    error: function(msg, obj, caller){
      apiError(msg, obj, caller);
    }
  }
  api.tags.info(params);
}

function getOldPosts(maxTagId){
  console.log();
  var params = {
    name: HASHTAGS[hashtagIndex],
    count: 100,
    max_tag_id: maxTagId || undefined,
    complete: function(data, pagination){
      console.log(pagination);
      var hashtag = settings.hashtags[hashtagIndex];

      //reached the end
      if( pagination.next_max_tag_id == hashtag.maxTagId){
        console.log('Downloaded all posts for #' + HASHTAGS[hashtagIndex]);
        hashtag.nextMaxTagId = null; //reset nextMaxTagId - that way next time we execute the script we know to just look for new posts
        saveSettings(function(){
          next();
        }); //Another function I haven't included - just saves the settings object, overwriting what is in the database. Once saved, executes the next() function
      }
      else {
        //from scratch
        if( !hashtag.maxTagId){
          //these values will be saved once all posts in this batch have been saved. We set these only once, meaning that we have a baseline to compare to - enabling us to determine if we have reached the end of pagination
          hashtag.maxTagId = pagination.next_max_tag_id;
          hashtag.minTagId = pagination.min_tag_id;
        }
        //if there is a failure then we know where to start from - this is only saved to the database once the posts are successfully saved to database
        hashtag.nextMaxTagId = pagination.next_max_tag_id;

        //again, another function not included. saves the posts to database, then updates the settings. Once they have completed we get the next page of data
        db.savePosts(data, function(){
          saveSettings(function(){
            getOldPosts(hashtag.nextMaxTagId);
          });
        });
      }
    },
    error: function(msg, obj, caller){
      apiError(msg, obj, caller);
      //keep calm and try again - this is our failure redundancy
      execute();
    }
  }
  var posts = api.tags.recent(params);
}

/**
 * Still to be completed!
 */
function getNewPosts(minTagId){
}

function next(){
  if( hashtagIndex < HASHTAGS.length - 1){
    console.log("Moving onto the next hashtag...");
    hashtagIndex++;
    execute();
  }
  else {
    console.log("All hashtags processed...");
  }
}
OK, so here is my dilemma about solving the next piece of the puzzle - downloading new posts (in other words, only those new posts that have come into existence since I last downloaded all the posts). Should I use Instagram subscriptions, or is there a way to implement paging similar to what I've already used? I'm worried that if I use the former solution then, if there is a problem with my server and it goes down for a period of time, I will miss out on some posts. I'm worried that if I use the latter solution then it might not be possible to page through the records, because the Instagram API seems to be set up to enable forward paging rather than backward paging.
I've attempted to post questions in the Google Instagram API Developers Group a couple of times and none of my messages seem to be appearing in the forum, so I thought I'd resort to trusty Stack Overflow.