I've read through the Firebase Cloud Functions reference, guides, and sample code to work out why my function is triggered twice, but have yet to find a resolution. I've also trialed Firebase-Queue as a workaround; however, its latest update suggests Cloud Functions is the way to go.
In short, I'm retrieving notices from an external API using request-promise, checking those notices against ones I already have in my database, and when a new notice is identified, posting it to said database. The corresponding venue is then updated with a reference to the new notice. Code is as follows:
'use strict';

const functions = require('firebase-functions');
const admin = require('firebase-admin');
const request = require('request');
const rp = require('request-promise');

admin.initializeApp(functions.config().firebase);
const db = admin.database();
const venues = db.ref("/venues/");

exports.getNotices = functions.https.onRequest((req, res) => {
  var options = {
    uri: 'https://xxxxx.xxxxx',
    qs: {
      format: 'json',
      type: 'venue',
      ...
    },
    json: true
  };
  rp(options).then(data => {
    processNotices(data);
    console.log(`venues received: ${data.length}`);
    res.status(200).send('OK');
  })
  .catch(error => {
    console.log(`Caught Error: ${error}`);
    res.status(error.statusCode || 500).send(`Error: ${error.statusCode}`);
  });
});
function processNotices(data) {
  venues.once("value").then(snapshot => {
    snapshot.forEach(childSnapshot => {
      var existingKey = childSnapshot.val().key;
      for (var i = 0; i < data.length; i++) {
        var notice = data[i];
        var noticeKey = notice.key;
        if (noticeKey !== existingKey) {
          console.log(`New notice identified: ${noticeKey}`);
          postNotice(notice);
        }
      }
      return true;
    });
  });
}
function postNotice(notice) {
  var ref = venues.push();
  var key = ref.key;
  var loc = notice.location;
  return ref.set(notice).then(() => {
    console.log('notice posted...');
    updateVenue(key, loc);
  });
}
function updateVenue(key, location) {
  var updates = {};
  updates[key] = "true";
  var venueNoticesRef = db.ref("/venues/" + location + "/notices/");
  return venueNoticesRef.update(updates).then(() => {
    console.log(`${location} successfully updated with ${key}`);
  });
}
Any suggestions as to how to rectify the double-triggering would be greatly appreciated. Thanks in advance!
Problem solved - misleading entries in the Firebase console logs (repeated log lines), coupled with nested for loops in the wrong order, were responsible for the apparent double triggering.
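For anyone hitting the same symptom, here is a minimal sketch of what the corrected nesting could look like (an illustration based on the description above, not the exact deployed fix): collect the existing keys first, then walk the incoming notices once.

// Sketch: gather existing keys up front, then post only genuinely new notices.
function processNotices(data) {
  return venues.once("value").then(snapshot => {
    const existingKeys = new Set();
    snapshot.forEach(childSnapshot => {
      existingKeys.add(childSnapshot.val().key);
    });
    data.forEach(notice => {
      if (!existingKeys.has(notice.key)) {
        console.log(`New notice identified: ${notice.key}`);
        postNotice(notice);
      }
    });
  });
}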
I have a Next.js application that needs to read a CSV file from a URL in the same repo in multiple places, but I cannot seem to retrieve this data. You can find the relevant file in my repo here.
Note, the URL I'm trying to pull data from is this: https://raw.githubusercontent.com/ivan-rivera/balderdash-next/main/public/test_rare_words.csv
Here is what I've tried so far:
Approach 1: importing the data
let vocab = {};
...
async function buildVocab() {
  const words = await import(VOCAB_URL); // works when pointing at a local folder, but not once deployed; pointing at the URL throws a "cannot find module" error
  for (let i = 0; i < words.length; i++) {
    vocab[words[i].word] = words[i].definition;
  }
}
Approach 2: papaparse
const papa = require("papaparse");
let vocab = {};
...
export async function buildVocab() {
  await papa.parse(VOCAB_URL, {
    header: true,
    download: true,
    delimiter: ",",
    step: function (row) {
      console.log("Row:", row.data); // this prints data correctly
    },
    complete: function (results) {
      console.log(results); // this returns an object with several attributes among which are "data" and "errors", and both are empty
    },
  });
  // this does not work because `complete` does not return anything
  vocab = Object.assign({}, ...raw.map((e) => ({ [e.word]: e.definition })));
  console.log(vocab);
}
Approach 3: needle
const csvParser = require("csv-parser");
const needle = require("needle");
let vocab = {};
...
let result = [];
needle
  .get(VOCAB_URL)
  .pipe(csvParser())
  .on("data", (data) => {
    result.push(data);
  });
vocab = Object.assign({}, ...result.map((e) => ({ [e.word]: e.definition })));
// This approach also returns nothing; however, I noticed that if I force it to sleep, I do get the results I want:
setTimeout(() => {
  console.log(result);
}, 1000); // now this prints the data I'm looking for
What I cannot figure out is how to force this function to wait for needle to retrieve the data. I've declared it as an async function and I'm calling it with await buildVocab() but it doesn't help.
Any ideas how I can fix this? Sorry, I'm a JS beginner, so it's probably something fundamental that I'm missing :(
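One common way to make the needle approach awaitable is to wrap the stream in a Promise that resolves on the parser's end event. A sketch, assuming csv-parser's standard 'data'/'end'/'error' stream events:

const csvParser = require("csv-parser");
const needle = require("needle");

function fetchRows(url) {
  // Resolve with all parsed rows once the CSV stream finishes.
  return new Promise((resolve, reject) => {
    const rows = [];
    needle
      .get(url)
      .pipe(csvParser())
      .on("data", (row) => rows.push(row))
      .on("end", () => resolve(rows))
      .on("error", reject);
  });
}

// Usage: const result = await fetchRows(VOCAB_URL);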
After spending hours on this, I think I finally found a solution:
const papa = require("papaparse"); // needed for papa.parse below
let vocab = {};
export async function buildVocab() {
  await fetch(VOCAB_URL)
    .then((resp) => resp.text())
    .then((text) => {
      papa.parse(text, { header: true }).data.forEach((row) => {
        vocab[row.word] = row.definition;
      });
    });
}
The only oddity that I still can't work out is this: I'm calling my buildVocab function inside another async function and I noticed that if I do not include a console.log statement in that function, then the vocab still does not get populated in time. Here is the function:
export async function sampleWord() {
  await buildVocab();
  const keys = Object.keys(vocab);
  const index = Math.floor(Math.random() * keys.length);
  console.log(`selected word: ${keys[index]}`); // this is important!
  return keys[index];
}
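If the timing oddity matters, one restructure worth trying (a sketch, not the posted solution) is to have buildVocab return the populated object instead of mutating a module-level variable, so callers depend on the awaited value rather than a side effect:

const papa = require("papaparse");

export async function buildVocab() {
  const resp = await fetch(VOCAB_URL);
  const text = await resp.text();
  const vocab = {};
  papa.parse(text, { header: true }).data.forEach((row) => {
    vocab[row.word] = row.definition;
  });
  return vocab; // callers get the data directly from the awaited promise
}

export async function sampleWord() {
  const vocab = await buildVocab();
  const keys = Object.keys(vocab);
  return keys[Math.floor(Math.random() * keys.length)];
}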
I am writing a test for a Firebase trigger, but I cannot make it work.
I want to use the local Firestore emulator and Jest to simulate a change in Firestore and see if the trigger does what it needs to do.
I require the cloud function in my test and initialize my app.
Setup.js:
const firebase = require('@firebase/testing');

const PROJECT_ID = 'project';
let admin;
let db;

const setupAdmin = async () => {
  admin = firebase.initializeAdminApp({
    projectId: PROJECT_ID
  });
  db = admin.firestore();
};

const getAdmin = () => {
  return admin;
};

const getDb = () => {
  return db;
};

module.exports.setupAdmin = setupAdmin;
module.exports.getAdmin = getAdmin;
module.exports.getDb = getDb;
Test.js:
describe('Billing', () => {
  let dbRef;

  beforeAll(async () => {
    const {db, admin} = require('../../../functions/helpers/setup');
    dbRef = db;
  });

  afterAll(async () => {
    await Promise.all(firebase.apps().map(app => app.delete()));
    console.log(`View rule coverage information at ${COVERAGE_URL}\n`);
  });

  it('test', async () => {
    const mockData = {
      'Users/user1': {
        uid: 'user1'
      },
      ['Users/user1/Taxes/' + new Date().getFullYear().toString()]: {
        totalExpenseEuro: 0
      }
    };
    for (const key in mockData) {
      const ref = dbRef.doc(key);
      await ref.set(mockData[key]);
    }
    // Create mockup data
    await dbRef.collection('Users').doc('user1').collection('Expenses').doc('expense1').set({
      amountEuroInclVAT: 100
    });
    // Make snapshot for state of database beforehand
    const beforeSnap = test.firestore.makeDocumentSnapshot({amountEuroInclVAT: 0}, 'Users/user1/Expenses/expense1');
    // Make snapshot for state of database after the change
    const afterSnap = test.firestore.makeDocumentSnapshot(
      {amountEuroInclVAT: 100},
      'Users/user1/Expenses/expense1'
    );
    const change = test.makeChange(beforeSnap, afterSnap);
    // Call wrapped function with the Change object
    const wrapped = test.wrap(calculateTaxesOnExpenseUpdate);
    wrapped(change, {
      params: {
        uid: 'test1'
      }
    });
  });
});
Now the main problem comes when I try to access this db object in my trigger
const calculateTaxesOnExpenseUpdate = functions.firestore
  .document('Users/{uid}/Expenses/{expenseId}')
  .onWrite(async (change, context) => {
    const {getDb} = require('../helpers/setup'); // This setup is the same as above
    let db = getDb();
    ...
For some reason, when I perform an action like (await db.collection('Users').get()).get('totalExpenseEuro'), Jest stops executing my code. When I set a debugger right after that line, it never gets hit. That piece of code crashes, and I have no idea why. I think the DB instance is not properly configured in my cloud trigger function.
Question: What is a good way of sharing the DB instance (admin.firestore()) between the test and the cloud trigger functions?
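One detail worth noting in the code above: Setup.js only assigns db inside setupAdmin, which the test never calls, and Test.js destructures {db, admin} even though the module only exports getters, so the trigger's getDb() can return undefined. A minimal sketch of one possible approach (initializing eagerly at require time; an assumption about intent, not a verified fix):

// helpers/setup.js: initialize once when the module is first required
const firebase = require('@firebase/testing');

const admin = firebase.initializeAdminApp({ projectId: 'project' });
const db = admin.firestore();

// Both the test and the trigger require this module and get the same instance.
module.exports = { admin, db, getDb: () => db };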
I'm new to Node.js and I've been working with a sample project by a third-party provider, and I'm trying to use Azure Key Vault to store configuration values.
I'm having trouble getting a process to wait before executing the rest. I'll try to detail as much as I know.
The sample project has a file named agent.js, which is the start page/file. On line 16 (agent_config = require('./config/config.js')[process.env.LP_ACCOUNT][process.env.LP_USER]) it loads a config file with values. I'm trying to set these values using Key Vault. I've tried many combinations of calling functions, even implementing async / await, but the value of agent_config always contains a [Promise] object rather than the data returned by Key Vault.
If I'm right, this is because the Key Vault itself uses async / await too and the config file returns before the Key Vault values are returned.
How can Key Vault be added/implemented in a situation like this?
Here's what I've tried:
First updated agent.js to
let agent_config = {};
try {
  agent_config = require('./config/config.js')['123']['accountName'];
} catch (ex) {
  log.warn(`[agent.js] Error loading config: ${ex}`);
}
console.log(agent_config);
Test 1
./config/config.js
const KeyVault = require('azure-keyvault');
const msRestAzure = require('ms-rest-azure');

const KEY_VAULT_URI = 'https://' + '{my vault}' + '.vault.azure.net/' || process.env['KEY_VAULT_URI'];

function getValue(secretName, secretVersion) {
  msRestAzure.loginWithAppServiceMSI({ resource: 'https://vault.azure.net' }).then((credentials) => {
    const client = new KeyVault.KeyVaultClient(credentials);
    client.getSecret(KEY_VAULT_URI, secretName, secretVersion).then(
      function (response) {
        return response.Value;
      });
  });
}

module.exports = {
  '123': {
    'accountName': {
      accountId: getValue('mySecretName', '')
    }
  }
};
Results
{ accountsId: undefined }
Test 2
Made getValue an async function and wrapped it in another function (also tried without the wrapping; that didn't work either).
./config/config.js
const KeyVault = require('azure-keyvault');
const msRestAzure = require('ms-rest-azure');

const KEY_VAULT_URI = 'https://' + '{my vault}' + '.vault.azure.net/' || process.env['KEY_VAULT_URI'];

async function getValue(secretName, secretVersion) {
  msRestAzure.loginWithAppServiceMSI({ resource: 'https://vault.azure.net' }).then((credentials) => {
    const client = new KeyVault.KeyVaultClient(credentials);
    client.getSecret(KEY_VAULT_URI, secretName, secretVersion).then(
      function (response) {
        return response.Value;
      });
  });
}

async function config() {
  module.exports = {
    '123': {
      'accountName': {
        accountId: await getValue('mySecretName', '')
      }
    }
  };
}

config();
Results
{}
Test 3
Made getValue an async function, this time with explicit returns (and without the wrapper function).
./config/config.js
const KeyVault = require('azure-keyvault');
const msRestAzure = require('ms-rest-azure');

const KEY_VAULT_URI = 'https://' + '{my vault}' + '.vault.azure.net/' || process.env['KEY_VAULT_URI'];

async function getValue(secretName, secretVersion) {
  return msRestAzure.loginWithAppServiceMSI({ resource: 'https://vault.azure.net' })
    .then((credentials) => {
      const client = new KeyVault.KeyVaultClient(credentials);
      return client.getSecret(KEY_VAULT_URI, secretName, secretVersion).then(
        function (response) {
          return response.Value;
        });
    });
}

module.exports = {
  '123': {
    'accountName': {
      accountId: getValue('mySecretName', '')
    }
  }
};
Results
{ accountId: { <pending> } }
Other
I've tried many other ways, like module.exports = async (value) => {...} (found through other questions/solutions), without success.
I'm starting to think I need to do some "waiting" in agent.js, but I haven't found good info on this.
Any help would be great!
One issue is that your getValue function is not returning anything; your returns need to be explicit. Without the promise being returned, there's nothing to await on:
async function getValue(secretName, secretVersion) {
  return msRestAzure.loginWithAppServiceMSI({ resource: 'https://vault.azure.net' })
    .then((credentials) => {
      const client = new KeyVault.KeyVaultClient(credentials);
      return client.getSecret(KEY_VAULT_URI, secretName, secretVersion).then(
        function (response) {
          return response.Value;
        });
    });
}
You could also get away with fewer explicit returns by using arrow functions:
const getValue = async (secretName, secretVersion) =>
  msRestAzure.loginWithAppServiceMSI({ resource: 'https://vault.azure.net' })
    .then(credentials => {
      const client = new KeyVault.KeyVaultClient(credentials);
      return client.getSecret(KEY_VAULT_URI, secretName, secretVersion)
        .then(response => response.Value);
    });
Introducing the Azure Key Vault read, which is async, means your whole config read is async. There's nothing you can do to get around that, so the code that uses the config will need to handle it appropriately. You start by exporting an async function that returns the config:
async function getConfig() {
  return {
    '123': {
      'accountName': {
        accountId: await getValue('mySecretName', '')
      }
    }
  };
}

module.exports = getConfig;
In your agent code, you call that function. This means your agent code will need to be wrapped in a function too, so maybe something like this:
const Bot = require('./bot/bot.js');
const getConfig = require('./config/config.js');

getConfig().then(agentConfig => {
  const agent = new Bot(agentConfig);
  agent.on(Bot.const.CONNECTED, data => {
    log.info(`[agent.js] CONNECTED ${JSON.stringify(data)}`);
  });
});
The package azure-keyvault has been deprecated in favor of new packages that deal with Key Vault keys, secrets, and certificates separately. For your scenario, you can use the new @azure/keyvault-secrets package to talk to Key Vault and the new @azure/identity package to create the credential.
const { SecretClient } = require("@azure/keyvault-secrets");
const { DefaultAzureCredential } = require("@azure/identity");

async function getValue(secretName, secretVersion) {
  const credential = new DefaultAzureCredential();
  const client = new SecretClient(KEY_VAULT_URI, credential);
  const secret = await client.getSecret(secretName);
  return secret.value;
}
The DefaultAzureCredential assumes that you have set the environment variables below:
AZURE_TENANT_ID: The tenant ID in Azure Active Directory
AZURE_CLIENT_ID: The application (client) ID registered in the AAD tenant
AZURE_CLIENT_SECRET: The client secret for the registered application
To try other credentials, see the readme for @azure/identity.
If you are moving from the older azure-keyvault package, check out the migration guide to understand the major changes.
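To tie the two answers together, the new SDK's getValue drops straight into the async config pattern shown earlier. A sketch, reusing the KEY_VAULT_URI and secret name from the question:

const { SecretClient } = require("@azure/keyvault-secrets");
const { DefaultAzureCredential } = require("@azure/identity");

const KEY_VAULT_URI = process.env.KEY_VAULT_URI;

async function getValue(secretName) {
  // DefaultAzureCredential picks up the AZURE_* env variables listed above.
  const client = new SecretClient(KEY_VAULT_URI, new DefaultAzureCredential());
  const secret = await client.getSecret(secretName);
  return secret.value;
}

async function getConfig() {
  return {
    '123': {
      'accountName': {
        accountId: await getValue('mySecretName')
      }
    }
  };
}

module.exports = getConfig;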
I am trying to modify a Node.js function called 'splunk-logger'. The problem is that when the SNS Message comes into the function, the events from the Anti-Virus (Trend Micro DeepSecurity) console are grouped together. I already contacted their support and they said this is just the way events are sent and they can't help.
Example: {Message {Event_1} {Event_2} {Event_3}}
Now the JavaScript function works great and the events are forwarded to Splunk. However, since they are grouped together BEFORE they even hit the Lambda function, Splunk sees them as 1 single event instead of 3.
My thought is to take the 'event' variable (since it contains the sns 'message') and parse through that to separate each event (probably using regex or something). Then, I can either create another function to send each event immediately or simply call the "logger.flushAsync" function to send them.
Link to splunk-dev explaining the function: http://dev.splunk.com/view/event-collector/SP-CAAAE6Y#create.
Here is the code from the index.js:
const loggerConfig = {
  url: process.env.SPLUNK_HEC_URL,
  token: process.env.SPLUNK_HEC_TOKEN,
};

const SplunkLogger = require('./lib/mysplunklogger');

const logger = new SplunkLogger(loggerConfig);

exports.handler = (event, context, callback) => {
  console.log('Received event:', JSON.stringify(event, null, 2));
  // Log JSON objects to Splunk
  logger.log(event);
  // Send all the events in a single batch to Splunk
  logger.flushAsync((error, response) => {
    if (error) {
      callback(error);
    } else {
      console.log(`Response from Splunk:\n${response}`);
      callback(null, event.key1); // Echo back the first key value
    }
  });
};
Here is the code from the mysplunklogger.js file.
'use strict';

const url = require('url');

const Logger = function Logger(config) {
  this.url = config.url;
  this.token = config.token;
  this.addMetadata = true;
  this.setSource = true;
  this.parsedUrl = url.parse(this.url);
  // eslint-disable-next-line import/no-dynamic-require
  this.requester = require(this.parsedUrl.protocol.substring(0, this.parsedUrl.protocol.length - 1));
  // Initialize request options which can be overridden & extended by consumer as needed
  this.requestOptions = {
    hostname: this.parsedUrl.hostname,
    path: this.parsedUrl.path,
    port: this.parsedUrl.port,
    method: 'POST',
    headers: {
      Authorization: `Splunk ${this.token}`,
    },
    rejectUnauthorized: false,
  };
  this.payloads = [];
};

// Simple logging API for Lambda functions
Logger.prototype.log = function log(message, context) {
  this.logWithTime(Date.now(), message, context);
};

Logger.prototype.logWithTime = function logWithTime(time, message, context) {
  const payload = {};
  if (Object.prototype.toString.call(message) === '[object Array]') {
    throw new Error('message argument must be a string or a JSON object.');
  }
  payload.event = message;
  // Add Lambda metadata
  if (typeof context !== 'undefined') {
    if (this.addMetadata) {
      // Enrich event only if it is an object
      if (message === Object(message)) {
        payload.event = JSON.parse(JSON.stringify(message)); // deep copy
        payload.event.awsRequestId = context.awsRequestId;
      }
    }
    if (this.setSource) {
      payload.source = `lambda:${context.functionName}`;
    }
  }
  payload.time = new Date(time).getTime() / 1000;
  this.logEvent(payload);
};

Logger.prototype.logEvent = function logEvent(payload) {
  this.payloads.push(JSON.stringify(payload));
};

Logger.prototype.flushAsync = function flushAsync(callback) {
  callback = callback || (() => {}); // eslint-disable-line no-param-reassign
  console.log('Sending event(s)');
  const req = this.requester.request(this.requestOptions, (res) => {
    res.setEncoding('utf8');
    console.log('Response received');
    res.on('data', (data) => {
      let error = null;
      if (res.statusCode !== 200) {
        error = new Error(`error: statusCode=${res.statusCode}\n\n${data}`);
        console.error(error);
      }
      this.payloads.length = 0;
      callback(error, data);
    });
  });
  req.on('error', (error) => {
    callback(error);
  });
  req.end(this.payloads.join(''), 'utf8');
};

module.exports = Logger;
import requests
import re
import json
import os

def lambda_handler(event, context):
    data = json.dumps(event)
    EventIds = re.findall(r'{\\\".+?\\\"}', data)
    EventLength = len(EventIds)
    headers = {'Authorization': 'Splunk ' + os.environ['SPLUNK_HEC_TOKEN']}
    i = 0
    while i < EventLength:
        response = requests.post(os.environ['SPLUNK_HEC_URL'], headers=headers, json={"event": EventIds[i]}, verify=True)
        i += 1
Arrays are the data type used when Deep Security 10.0 or newer sends events to Amazon SNS. But Splunk wants one event per message. So don't send the array directly.
Instead, use the Splunk logger or Lambda to iterate through the array, sending each item as an individual message. You can modify this sample Lambda script for Node.js:
https://github.com/deep-security/amazon-sns/blob/master/lambda-save-ds-event-to-s3.js
It sends events to S3 individually (which is what you need). Just change it to send to Splunk instead.
Disclosure: I work for Trend Micro.
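For example, a sketch of that iteration in the handler from the question, assuming each SNS record's Message field holds a JSON array of Deep Security events (verify the exact shape against your own payloads):

exports.handler = (event, context, callback) => {
  // Log each Deep Security event individually so Splunk indexes them separately.
  event.Records.forEach((record) => {
    const message = JSON.parse(record.Sns.Message);
    const events = Array.isArray(message) ? message : [message];
    events.forEach((dsEvent) => logger.log(dsEvent));
  });
  logger.flushAsync((error, response) => {
    if (error) {
      callback(error);
    } else {
      console.log(`Response from Splunk:\n${response}`);
      callback(null, 'sent');
    }
  });
};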
I am trying to write a Firebase function that returns a JSON dump of my entire Firebase database (or even just the /Reports node). Why does the code below just return a blank?
app.get('/export', (req, res) => {
  var db = admin.database();
  var fullDataDump;
  db.ref('Reports').on('value', function(snapshot) {
    fullDataDump = snapshotToArray(snapshot);
  });
  return res.status(200).json(fullDataDump);
});

function snapshotToArray(snapshot) {
  var returnArr = [];
  snapshot.forEach(function(childSnapshot) {
    var item = childSnapshot.val();
    item.key = childSnapshot.key;
    returnArr.push(item);
  });
  return returnArr;
}
Data is loaded from Firebase (and pretty much any modern web API) asynchronously. By the time you send the response, the data hasn't returned from Firebase yet.
That's the reason you have to specify a callback when you register your listener: that way Firebase can invoke your code once it's done loading the data.
The solution is to move the code that sends the response to the client into the callback:
app.get('/export', (req, res) => {
  var db = admin.database();
  var fullDataDump;
  db.ref('Reports').once('value', function(snapshot) {
    fullDataDump = snapshotToArray(snapshot);
    res.status(200).json(fullDataDump);
  });
});

function snapshotToArray(snapshot) {
  var returnArr = [];
  snapshot.forEach(function(childSnapshot) {
    var item = childSnapshot.val();
    item.key = childSnapshot.key;
    returnArr.push(item);
  });
  return returnArr;
}
You'll note that I also changed on('value' to once('value', since you're only interested in getting the data one time.
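Since once('value') also returns a promise in the Admin SDK, the same handler can be written without the callback; a small equivalent sketch:

app.get('/export', (req, res) => {
  // once('value') resolves with the snapshot, so the response is sent after the data arrives.
  admin.database().ref('Reports').once('value')
    .then(snapshot => res.status(200).json(snapshotToArray(snapshot)))
    .catch(error => res.status(500).send(`Error: ${error}`));
});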