I have a custom formula that gets the redirect of a link. It takes an array as an argument. It's based on a Google Apps Script example found here: https://developers.google.com/apps-script/guides/sheets/functions
The problem is that the source list gets updated and the range changes. If the range isn't exact, it breaks. Here is the code for the custom function:
function DOUBLE(input) {
  return input.map(getRedirects);
}

function getRedirects(input) {
  var urlKey = input;
  var cache = CacheService.getScriptCache();
  var result = cache.get(urlKey);
  if (!result) {
    var params = {
      'followRedirects': false,
      'muteHttpExceptions': true
    };
    var res = UrlFetchApp.fetch(input, params);
    var finalURL = res.getHeaders()['Location'];
    cache.put(urlKey, finalURL, 21600);
    result = finalURL;
  }
  return result;
}
And here's the spreadsheet: working sheet
This should help you out. Wrap the fetch in a try..catch block. UrlFetchApp fails when it gets an empty input; when that happens, the catch block kicks in. Another option is writing an if statement: if the input is not '', do the fetch and return, else return something sensible, etc. (see the sketch after the code below).
function DOUBLE(input) {
  return input.map(getRedirects);
}

function getRedirects(input) {
  var params = {
    'followRedirects': false,
    'muteHttpExceptions': true
  };
  try {
    var res = UrlFetchApp.fetch(input, params);
    var finalURL = res.getHeaders()['Location'];
    return finalURL;
  } catch (error) {
    return undefined;
  }
}
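As mentioned above, the if-statement variant is also an option. A minimal sketch of it, assuming an empty cell arrives as an empty string:

function getRedirects(input) {
  // guard against empty cells instead of relying on try..catch
  if (input === '') {
    return '';
  }
  var params = {
    'followRedirects': false,
    'muteHttpExceptions': true
  };
  var res = UrlFetchApp.fetch(input, params);
  return res.getHeaders()['Location'];
}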
EDIT ("normal" function): (Why is my code block like that? I even tried to use a online beautifier...)
Select the range in your sheet
Custom menu "Redirects" -> Run
Output is placed in the column next to it.
function onOpen(e) {
  SpreadsheetApp.getUi().createMenu('Redirects')
    .addItem('run', 'getRedirects')
    .addToUi();
}

function getRedirects() {
  const activeRange = SpreadsheetApp.getActiveRange();
  const values = activeRange.getValues().flat();
  const output = [];
  const params = {
    'followRedirects': false,
    'muteHttpExceptions': true
  };
  values.forEach(url => {
    try {
      const res = UrlFetchApp.fetch(url, params);
      const finalURL = res.getHeaders()['Location'];
      output.push([finalURL]);
    } catch (error) {
      output.push([JSON.stringify(error)]);
    }
  });
  activeRange.offset(0, 1).setValues(output);
}
I am trying to fetch data from different collections in my Cloud Firestore database in advance, before I process it and apply it to a batch. I created two async functions: one to capture the data and another to execute certain code only after all the data is collected, because I didn't want the code executing and creating errors before the data is fetched. But when I try to access matchesObject after the async data-collection function has finished, it keeps saying "it cannot access a property matchStatus of undefined". I thought I took care of that with async and await? Could anyone shed some light as to why it is undefined?
axios.request(options).then(function (response) {
  console.log('Total matches count :' + response.data.matches.length);
  const data = response.data;

  var matchesSnapshot;
  var marketsSnapshot;
  var tradesSnapshot;
  var betsSnapshot;
  matchesObject = {};
  marketsObject = {};
  tradesObject = {};
  betsObject = {};

  start();

  async function checkDatabase() {
    matchesSnapshot = await db.collection('matches').get();
    matchesSnapshot.forEach(doc => {
      matchesObject[doc.id] = doc.data();
      console.log('matches object: ' + doc.id.toString());
    });
    marketsSnapshot = await db.collection('markets').get();
    marketsSnapshot.forEach(doc2 => {
      marketsObject[doc2.id] = doc2.data();
      console.log('markets object: ' + doc2.id.toString());
    });
    tradesSnapshot = await db.collection('trades').get();
    tradesSnapshot.forEach(doc3 => {
      tradesObject[doc3.id] = doc3.data();
      console.log('trades object: ' + doc3.id.toString());
    });
    betsSnapshot = await db.collection('bets').get();
    betsSnapshot.forEach(doc4 => {
      betsObject[doc4.id] = doc4.data();
      console.log('bets object: ' + doc4.id.toString());
    });
  }

  async function start() {
    await checkDatabase();
    // this is the part which is undefined, it keeps saying it can't access property matchStatus of undefined
    console.log('here is matches object ' + matchesObject['302283']['matchStatus']);
    if (Object.keys(matchesObject).length != 0) {
      for (let bets of Object.keys(betsObject)) {
        if (matchesObject[betsObject[bets]['tradeMatchId']]['matchStatus'] == 'IN_PLAY' && betsObject[bets]['matched'] == false) {
          var sfRef = db.collection('users').doc(betsObject[bets]['user']);
          batch11.set(sfRef, {
            accountBalance: admin.firestore.FieldValue + parseFloat(betsObject[bets]['stake']),
          }, {
            merge: true
          });
          var sfRef = db.collection('bets').doc(bets);
          batch12.set(sfRef, {
            tradeCancelled: true,
          }, {
            merge: true
          });
        }
      }
    }
  }
});
There are too many smaller issues in the current code to debug them one by one, so this refactor introduces various checks on your data. It currently won't make any changes to your database and is meant to be a replacement for your start() function.
One of the main differences from your current code is that it doesn't unnecessarily download four collections' worth of documents (two of them aren't even used in the code you've included).
Steps
First, it will get all the bet documents that have matched == false. From these documents, it will check whether they have any syntax errors and report them to the console. For each valid bet document, the ID of its linked match document is grabbed so we can then fetch only the match documents we actually need. Then we queue up the changes to the user's balance and the bet's document. Finally, we report how many changes are queued and commit them (once you uncomment the line).
Code
Note: fetchDocumentsById() is defined in this gist. It's a helper function that allows someCollectionRef.where(FieldPath.documentId(), 'in', arrayOfIds) to take more than 10 IDs at once.
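The gist isn't reproduced here, but a rough sketch of what such a helper might look like (an assumption on my part, not the gist's exact code):

// Sketch only: chunks the ID list into groups of 10, because Firestore's
// 'in' operator accepts at most 10 values per query.
async function fetchDocumentsById(collectionRef, idList, forEachDoc) {
  const chunks = [];
  for (let i = 0; i < idList.length; i += 10) {
    chunks.push(idList.slice(i, i + 10));
  }
  await Promise.all(chunks.map((chunk) =>
    collectionRef
      .where(admin.firestore.FieldPath.documentId(), 'in', chunk)
      .get()
      .then((snapshot) => snapshot.forEach(forEachDoc))
  ));
}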
async function applyBalanceChanges() {
  const betsCollectionRef = db.collection('bets');
  const matchesCollectionRef = db.collection('matches');
  const usersCollectionRef = db.collection('users');

  const betDataMap = {}; // Record<string, BetData>
  await betsCollectionRef
    .where('matched', '==', false)
    .get()
    .then((betsSnapshot) => {
      betsSnapshot.forEach(betDoc => {
        betDataMap[betDoc.id] = betDoc.data();
      });
    });

  const matchDataMap = {}; // Record<string, MatchData | undefined>

  // betIdList contains all IDs that will be processed
  const betIdList = Object.keys(betDataMap).filter(betId => {
    const betData = betDataMap[betId];
    if (!betData) {
      console.log(`WARN: Skipped Bet #${betId} because it was falsy (actual value: ${betData})`);
      return false;
    }
    const matchId = betData.tradeMatchId;
    if (!matchId) {
      console.log(`WARN: Skipped Bet #${betId} because it had a falsy match ID (actual value: ${matchId})`);
      return false;
    }
    if (!betData.user) {
      console.log(`WARN: Skipped Bet #${betId} because it had a falsy user ID (actual value: ${betData.user})`);
      return false;
    }
    const stakeAsNumber = Number(betData.stake); // not using parseFloat as it's too lax
    if (isNaN(stakeAsNumber)) {
      console.log(`WARN: Skipped Bet #${betId} because it had an invalid stake value (original NaN value: ${betData.stake})`);
      return false;
    }
    matchDataMap[matchId] = undefined; // using undefined because it's the result of `doc.data()` when the document doesn't exist
    return true;
  });

  await fetchDocumentsById(
    matchesCollectionRef,
    Object.keys(matchDataMap),
    (matchDoc) => matchDataMap[matchDoc.id] = matchDoc.data()
  );

  const batch = db.batch();
  let queuedUpdates = 0; // `let`, because it is incremented below

  betIdList.forEach(betId => {
    const betData = betDataMap[betId];
    const matchData = matchDataMap[betData.tradeMatchId];

    if (matchData === undefined) {
      console.log(`WARN: Skipped /bets/${betId}, because its linked match doesn't exist!`);
      return; // `return`, not `continue`, inside a forEach callback
    }

    if (matchData.matchStatus !== 'IN_PLAY') {
      console.log(`INFO: Skipped /bets/${betId}, because its linked match status is not "IN_PLAY" (actual value: ${matchData.matchStatus})`);
      return;
    }

    const betRef = betsCollectionRef.doc(betId);
    const betUserRef = usersCollectionRef.doc(betData.user);

    batch.update(betUserRef, { accountBalance: admin.firestore.FieldValue.increment(Number(betData.stake)) });
    batch.update(betRef, { tradeCancelled: true });
    queuedUpdates += 2; // for logging
  });

  console.log(`INFO: Batch currently has ${queuedUpdates} updates queued`);

  // only uncomment when you are ready to make changes
  // batch.commit();
}
Usage:
axios.request(options)
  .then(function (response) {
    const data = response.data;
    console.log('INFO: Total matches count from API: ' + data.matches.length);
    return applyBalanceChanges();
  });
In the function called "readPlcDataWriteToDB" I need to wait for the data to arrive before returning a response. How can I do it?
I am getting an error in this code because of this problem. When I add "await", like "newData = await data;", it gives the same result, no effect.
Please help me solve this. Thanks.
const asyncErrorWrapper = require("express-async-handler");
var nodes7 = require('nodes7'); // This is the package name; if the repository is cloned you may need to require 'nodeS7' with an uppercase S
var conn = new nodes7;
const MachineState = require("../models/MachineState");

var newData;
var doneReading = false;
var doneWriting = false;

var variables = {
  nMachine1: 'DB7,BYTE0',
  nMachine2: 'DB7,BYTE1',
  nMachine3: 'DB7,BYTE2',
  nMachine4: 'DB7,BYTE3',
  nMachine5: 'DB7,BYTE4',
  nMachine6: 'DB7,BYTE5',
  nMachine7: 'DB7,BYTE6',
  nMachine8: 'DB7,BYTE7',
  nMachine9: 'DB7,BYTE8',
  nMachine10: 'DB7,BYTE9',
  nMachine11: 'DB7,BYTE10',
  nMachine12: 'DB7,BYTE11',
  nMachine13: 'DB7,BYTE12',
  nMachine14: 'DB7,BYTE13',
  nMachine15: 'DB7,BYTE14'
};

var data;

conn.initiateConnection({ port: 102, host: '192.168.200.1', rack: 0, slot: 1 }, connected); // slot 2 for 300/400, slot 1 for 1200/1500

function connected(err) {
  if (typeof (err) !== "undefined") {
    // We have an error. Maybe the PLC is not reachable.
    console.log(err);
    process.exit();
  }
  conn.setTranslationCB(function (tag) { return variables[tag]; }); // This sets the "translation" to allow us to work with object names
  conn.addItems(['nMachine1', 'nMachine2', 'nMachine3', 'nMachine4', 'nMachine5', 'nMachine6', 'nMachine7', 'nMachine8', 'nMachine9', 'nMachine10', 'nMachine11', 'nMachine12', 'nMachine13', 'nMachine14', 'nMachine15']);
}

function valuesReady(anythingBad, values) {
  if (anythingBad) { console.log("SOMETHING WENT WRONG READING VALUES!!!!"); }
  //console.log(values);
  console.log("Done reading.");
  doneReading = true;
  if (doneWriting) { process.exit(); }
  data = values;
  sendDataToDB(values);
}

const readPlcDataWriteToDB = asyncErrorWrapper(async (req, res, next) => {
  await conn.readAllItems(valuesReady);
  newData = data;
  return res
    .status(200)
    .json({
      success: true,
      data: newData
    });
});

const sendDataToDB = asyncErrorWrapper(async (req, res, next) => {
  let allMachineStates = await MachineState.findOne();
  allMachineStates.Machine.M1 = newData.nMachine1;
  allMachineStates.Machine.M2 = newData.nMachine2;
  allMachineStates.Machine.M3 = newData.nMachine3;
  allMachineStates.Machine.M4 = newData.nMachine4;
  allMachineStates.Machine.M5 = newData.nMachine5;
  allMachineStates.Machine.M6 = newData.nMachine6;
  allMachineStates.Machine.M7 = newData.nMachine7;
  allMachineStates.Machine.M8 = newData.nMachine8;
  allMachineStates.Machine.M9 = newData.nMachine9;
  allMachineStates.Machine.M10 = newData.nMachine10;
  allMachineStates.Machine.M11 = newData.nMachine11;
  allMachineStates.Machine.M12 = newData.nMachine12;
  allMachineStates.Machine.M13 = newData.nMachine13;
  allMachineStates.Machine.M14 = newData.nMachine14;
  allMachineStates.Machine.M15 = newData.nMachine15;
  await allMachineStates.save();
  console.log("PLC'den Alınan Verilere Göre Database Güncellendi"); // Turkish: "Database updated according to the data received from the PLC"
});

module.exports = {
  readPlcDataWriteToDB
};
Based on the documentation for nodes7, it appears that readAllItems does not return a Promise, but rather expects a callback. This means that await will not correctly wait for it, so the assignment to newData wouldn't work.
Either move the handling of newData into a callback, or try something like util.promisify to convert the library function to use Promises. For example:
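A minimal sketch of the promisify approach, assuming the rest of your setup stays the same. Note that nodes7 calls back with (anythingBad, values) rather than a Node-style (err, values); util.promisify treats a truthy first argument as a rejection, which happens to fit here:

const util = require("util");

// bind so `this` inside readAllItems still points at the connection
const readAllItemsAsync = util.promisify(conn.readAllItems).bind(conn);

const readPlcDataWriteToDB = asyncErrorWrapper(async (req, res, next) => {
  const values = await readAllItemsAsync(); // now actually waits for the PLC read
  return res.status(200).json({
    success: true,
    data: values
  });
});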
It seems to me that you should put your await calls inside a try-catch structure. Like this:
try {
  await myFunctionOne();
  const myConstOne = await myFunctionTwo();
  // ... whatever you need to put here ...
} catch (error) {
  console.error(error);
}
From there you would get rid of the "Unhandled Promise Rejection" issue, and you could see what is causing the problem.
Besides that, you may find these few tutorials about async/await useful; I learned from them myself and still refer back to them sometimes.
For the following code, the parameters are JS objects whose structures are initialized as follows:
statePiece = {
  field_name: { disabled: false, exampleValue: "arbitrary" },
  field_name2: {
    /* ... */
  },
  field_nameN: {
    /* ... */
  }
};

userField = "field_name_string";

sesarValues = {
  format: "one2one",
  selectedField: "latitude",
  disabledSelf: true,
  addField: 0
};
This function works correctly and returns the modified statePiece as returnTemp the first time a particular statePiece.field_name is modified:
export let setUserField = (statePiece, userField, sesarValues) => {
  console.log("set user field", userField, "set mappval", sesarValues);
  var temp = { ...statePiece }; //(this.state.fields[each].mappedTo != null) ? (this.state.fields[userField].mappedTo) : [];
  var XUnit = statePiece[userField];
  if (typeof userField != "string") {
    console.log("not string");
    for (var each of userField) {
      if (sesarValues) {
        temp[each].mappedTo = sesarValues.selectedField;
        temp[each].disabled = true;
      } else {
        temp[each].disabled = !temp[each].disabled;
        delete temp[each].mappedTo;
      }
    }
  } else {
    //is string
    console.log("is string");
    console.log(XUnit);
    if (sesarValues) {
      if (XUnit.disabled === true) XUnit.disabled = false;
      console.log("1");
      console.log(XUnit);
      XUnit.disabled = true;
      console.log(XUnit);
      XUnit.mappedTo = sesarValues.selectedField;
    } else {
      console.log("2");
      temp[userField].disabled = !temp[userField].disabled;
      delete temp[userField].mappedTo;
    }
  }
  let returnTemp = { ...temp, [userField]: XUnit };
  console.log("set UF debug ", returnTemp);
  console.log(returnTemp["FACILITY_CODE"]);
  return returnTemp;
};
But after that, changing the statePiece.userField.mappedTo value fails to alter the object property, or at least fails to alter it permanently. When I console.log the returnTemp variable, I see the entry has lost its mappedTo entry (as should happen) without it being replaced with the new userField.
However, when I console.log(returnTemp[userField]) it shows the entry values with the expected mappedTo key/value pair.
Not sure what's going on here.
From the usage of userField, I can work out that it could be an Array or a String.
However you have done something curious with it in the following expression:
var XUnit = statePiece[userField];
Given userField is a String, the above expression is fine.
However, where it is an array, XUnit will be undefined.
Also, doing the same where userField is an Array in the following line means that you're setting userField.toString() as a key mapped to undefined:
let returnTemp = { ...temp, [userField]: XUnit };
I'd assign XUnit inside the branch where userField has been confirmed to be a String, and just return temp:
else {
  //is string
  var XUnit = statePiece[userField];
  //...
}
return temp;
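Putting that together, a sketch of the whole function with that change, keeping your existing logic otherwise and dropping the debug logging for brevity:

export let setUserField = (statePiece, userField, sesarValues) => {
  var temp = { ...statePiece };
  if (typeof userField != "string") {
    // userField is an array of field names
    for (var each of userField) {
      if (sesarValues) {
        temp[each].mappedTo = sesarValues.selectedField;
        temp[each].disabled = true;
      } else {
        temp[each].disabled = !temp[each].disabled;
        delete temp[each].mappedTo;
      }
    }
  } else {
    // userField is a single field name, so XUnit is safe to assign here
    var XUnit = temp[userField];
    if (sesarValues) {
      XUnit.disabled = true;
      XUnit.mappedTo = sesarValues.selectedField;
    } else {
      XUnit.disabled = !XUnit.disabled;
      delete XUnit.mappedTo;
    }
  }
  return temp;
};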
I may be missing something basic, but why is this happening?
GET: example.com/users
//gives all data
GET: example.com/users?status=1
//gives data with status = 1
GET: example.com/users // this does not work
//gives the same data as the previous call, with status=1
On the third hit, self.whereObj is not initialising to the default empty object; instead it keeps the previous value of { status: '1' }. However, self.page and self.limit do take their default values if no query parameter is provided in the query string.
example.com/users?limit=3 // limit is overridden to 3 from the default value of 5
example.com/users // self.limit takes the default 5, and this works fine
So my question is: why is self.limit (a simple string variable) initialising, while self.whereObj is not?
var Bookshelf = require('../../dbconfig').bookshelf;

Bookshelf.Collection = Bookshelf.Collection.extend({
  limit: 5,
  page: 1,
  whereObj: {},

  myFetch: function (query_params, expectedWhereFields) {
    var self = this;
    var whereObj = self.whereObj; // this is not initialising
    // var whereObj = {}; // this is initialising
    var page = self.page;
    var limit = self.limit; // this is not showing any initialisation error
    for (var x in query_params) {
      if (expectedWhereFields.includes(x)) {
        whereObj[x] = query_params[x];
      }
      if (x === 'page') {
        page = query_params[x];
      }
      if (x === 'limit') {
        limit = query_params[x];
      }
    }
    var offset = (page - 1) * limit;

    function fetch() {
      return self.constructor.forge()
        .query({ where: whereObj })
        .query(function (qb) {
          qb.offset(offset).limit(limit);
        })
        .then(function (collection) {
          return collection;
        })
        .catch(function (err) {
          return err;
        });
    }

    return new fetch();
  }
});

module.exports = Bookshelf;
UPDATED
service.js
var Model = require('./../models/Users');
var express = require('express');

var listUsers = function (query_params, callback) {
  var expectedWhereFields = ["type", "status", "name"];
  Model.Users
    .forge()
    .myFetch(query_params, expectedWhereFields)
    .then(function (collection) {
      return callback(null, collection);
    })
    .catch(function (err) {
      return callback(err, null);
    });
};

module.exports = {
  listUsers: listUsers
};
model/Users.js
var Bookshelf = require('../../dbconfig').bookshelf;
var Base = require('./base');

// Users model
var User = Bookshelf.Model.extend({
  tableName: 'user_table'
});

var Users = Bookshelf.Collection.extend({
  model: User
});

module.exports = {
  User: User,
  Users: Users
};
So my question is: why is self.limit (a simple string variable) initialising, while self.whereObj is not?
Because objects are reference values. When you set var whereObj = self.whereObj;, both refer to the same object, and when you copy the query parameters into the object properties you are effectively writing into your defaults instance. This does not happen with primitive values such as strings - they don't have mutable properties.
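A minimal standalone illustration of the difference (not your actual Bookshelf setup):

var defaults = { limit: 5, whereObj: {} };

var limit = defaults.limit;       // copies the primitive value
var whereObj = defaults.whereObj; // copies the reference: both names point at the same object

limit = 3;             // defaults.limit is still 5
whereObj.status = '1'; // defaults.whereObj is now { status: '1' } as well!

console.log(defaults.limit);    // 5
console.log(defaults.whereObj); // { status: '1' }

// copying the object instead keeps the defaults untouched:
var freshWhereObj = Object.assign({}, defaults.whereObj);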
I have a txt file that contains:
{"date":"2013/06/26","statement":"insert","nombre":1}
{"date":"2013/06/26","statement":"insert","nombre":1}
{"date":"2013/06/26","statement":"select","nombre":4}
How can I convert the contents of the text file into an array such as:
statement = [
  {"date":"2013/06/26","statement":"insert","nombre":1},
  {"date":"2013/06/26","statement":"insert","nombre":1},
  {"date":"2013/06/26","statement":"select","nombre":4}
];
I use the fs module of Node.js. Thanks.
Sorry, I will explain in more detail.
I have an array:
st = [
  {"date":"2013/06/26","statement":"insert","nombre":1},
  {"date":"2013/06/26","statement":"insert","nombre":5},
  {"date":"2013/06/26","statement":"select","nombre":4}
];
If I use this code:
var arr = new LINQ(st)
  .OrderBy(function (x) { return x.nombre; })
  .Select(function (x) { return x.statement; })
  .ToArray();
I get the result I want:
insert select insert
But the problem is that my data is in a text file. Any suggestions? And thanks again.
There is no reason not to write the file parser yourself, and this will work on a file of any size:
var fs = require('fs');
var fileStream = fs.createReadStream('file.txt');

var data = "";
fileStream.on('readable', function () {
  // this function reads chunks of data and emits a newLine event when \n is found
  var chunk = fileStream.read();
  if (chunk !== null) { data += chunk; } // read() returns null when no data is available
  while (data.indexOf('\n') >= 0) {
    fileStream.emit('newLine', data.substring(0, data.indexOf('\n')));
    data = data.substring(data.indexOf('\n') + 1);
  }
});
fileStream.on('end', function () {
  // this function sends the last chunk of data to the newLine event and tells it
  // that the file has ended
  fileStream.emit('newLine', data, true);
});

var statement = [];
fileStream.on('newLine', function (line_of_text, end_of_file) {
  // this is the code where you handle each line
  // line_of_text = string which contains one line
  // end_of_file = true if the end of file has been reached
  statement.push(JSON.parse(line_of_text));
  if (end_of_file) {
    console.dir(statement);
    // here you have your statement object ready
  }
});
If it's a small file, you might get away with something like this:
// specifying the encoding means you don't have to do `.toString()`
var arrayOfThings = fs.readFileSync("./file", "utf8").trim().split(/[\r\n]+/g).map(function (line) {
  // this try/catch will make it so we just return null
  // for any lines that don't parse successfully, instead
  // of throwing an error.
  try {
    return JSON.parse(line);
  } catch (e) {
    return null;
  }
  // this .filter() removes anything that didn't parse correctly
}).filter(function (object) {
  return !!object;
});
If it's larger, you might want to consider reading it in line by line, using any one of the many modules on npm for consuming lines from a stream; a sketch with Node's built-in readline module follows below.
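For instance, with Node's built-in readline module (my suggestion, not part of the original answer):

var readline = require("readline");
var fs = require("fs");

var statement = [];
var rl = readline.createInterface({ input: fs.createReadStream("./file") });

rl.on("line", function (line) {
  // each "line" event delivers one line, without the trailing newline
  try {
    statement.push(JSON.parse(line));
  } catch (e) {
    // ignore lines that aren't valid JSON
  }
});

rl.on("close", function () {
  console.dir(statement); // the full array, once the file has been consumed
});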
Wanna see how to do it with streams? This isn't a practical example, but it's fun anyway!
var stream = require("stream"),
fs = require("fs");
var LineReader = function LineReader(options) {
options = options || {};
options.objectMode = true;
stream.Transform.call(this, options);
this._buffer = "";
};
LineReader.prototype = Object.create(stream.Transform.prototype, {constructor: {value: LineReader}});
LineReader.prototype._transform = function _transform(input, encoding, done) {
if (Buffer.isBuffer(input)) {
input = input.toString("utf8");
}
this._buffer += input;
var lines = this._buffer.split(/[\r\n]+/);
this._buffer = lines.pop();
for (var i=0;i<lines.length;++i) {
this.push(lines[i]);
}
return done();
};
LineReader.prototype._flush = function _flush(done) {
if (this._buffer.length) {
this.push(this._buffer);
}
return done();
};
var JSONParser = function JSONParser(options) {
options = options || {};
options.objectMode = true;
stream.Transform.call(this, options);
};
JSONParser.prototype = Object.create(stream.Transform.prototype, {constructor: {value: JSONParser}});
JSONParser.prototype._transform = function _transform(input, encoding, done) {
try {
input = JSON.parse(input);
} catch (e) {
return done(e);
}
this.push(input);
return done();
};
var Collector = function Collector(options) {
options = options || {};
options.objectMode = true;
stream.Transform.call(this, options);
this._entries = [];
};
Collector.prototype = Object.create(stream.Transform.prototype, {constructor: {value: Collector}});
Collector.prototype._transform = function _transform(input, encoding, done) {
this._entries.push(input);
return done();
};
Collector.prototype._flush = function _flush(done) {
this.push(this._entries);
return done();
};
fs.createReadStream("./file").pipe(new LineReader()).pipe(new JSONParser()).pipe(new Collector()).on("readable", function() {
var results = this.read();
console.log(results);
});
fs.readFileSync("myfile.txt").toString().split(/[\r\n]/)
This gets you each line as a string.
You can then use UnderscoreJS or your own for loop to apply the JSON.parse("your json string") method to each element of the array, as in the example below.
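For example, with a plain Array.prototype.map (no Underscore needed; the filter drops empty lines left over by trailing newlines):

var statements = fs.readFileSync("myfile.txt").toString().split(/[\r\n]/)
  .filter(function (line) { return line.trim().length > 0; })
  .map(function (line) { return JSON.parse(line); });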
var arr = fs.readFileSync('mytxtfile', 'utf-8').split('\n')
I think this is the simplest way of creating an array from your text file.