I am using meteor-job-collection (https://github.com/vsivsi/meteor-job-collection); however, I cannot create a job.
I have a method defined as
Meteor.methods( {
    insertItems: function ( dataArray ) {
        check( dataArray, [ Object ] );
        dataArray.forEach( function ( element ) {
            [...]
        } );
    }
} );
but the method is very memory-intensive, so I want to wrap it inside a job. How do I start this job?
I have tried
var job = new Job( Jobs, 'insertItems', data )
    .priority( 'normal' )
    .retry( {
        retries: 5,
        wait: 15 * 60 * 1000
    } )
    .delay( 60 * 60 * 1000 )
    .save();
but I get the error
Error invoking Method 'jobQueue_jobSave': Internal server error [500]
My Job Collection is defined as
Jobs = JobCollection( 'jobQueue' );
if ( Meteor.isServer ) {
    Jobs.allow( {
        admin: function ( userId, method, params ) {
            return true;
        },
    } );
}
Multiple things are missing from your code.
1 - Make sure you start the job server before submitting any jobs. Call startJobServer() on the jobCollection on the server.
Jobs = JobCollection( 'jobQueue' );
if ( Meteor.isServer ) {
    Jobs.allow( {
        admin: function ( userId, method, params ) {
            return true;
        },
    } );
    Jobs.startJobServer();
}
2 - You need to implement the processing of the job. A job is nothing more than a label with data attached, scheduled to run at a certain point in time. The handler implements the job logic. In your case you would need something like this:
var workers = Job.processJobs('jobQueue', 'insertItems',
    function (job, cb) {
        var insertData = job.data;
        // do anything with the job data here.
        // When finished, call exactly one of job.done() or job.fail():
        job.done(); // when done successfully
        // job.fail("with reason or error"); // when failing
        // Be sure to invoke the callback
        // when work on this job has finished
        cb();
    }
);
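Applied to your case, the worker would take over the memory-heavy loop. A sketch, assuming you submit the job with an object payload such as new Job(Jobs, 'insertItems', { dataArray: dataArray }):
Job.processJobs('jobQueue', 'insertItems',
    function (job, cb) {
        // the payload you attached when creating the job
        var dataArray = job.data.dataArray;
        try {
            dataArray.forEach(function (element) {
                // [...] the original per-element logic from insertItems
            });
            job.done();
        } catch (err) {
            job.fail('' + err);
        }
        // always signal the queue that this worker is free again
        cb();
    }
);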
Related
Consider the following excerpt from this example in the InfluxDB documentation:
const fluxObserver = {
    next(row, tableMeta) {
        const o = tableMeta.toObject(row)
        console.log(
            `${o._time} ${o._measurement} in ${o.region} (${o.sensor_id}): ${o._field}=${o._value}`
        )
    },
    error(error) {
        console.error(error)
        console.log('\nFinished ERROR')
    },
    complete() {
        console.log('\nFinished SUCCESS')
    }
}
/** Execute a query and receive line table metadata and rows. */
queryApi.queryRows(fluxQuery, fluxObserver)
The fluxObserver defines callbacks for different stages of the query's execution, all of which are just console.log calls. I can imagine that people often want to do something similar in a way that is actually useful, for instance return an array containing the result of the query instead of logging to the console. My problem is that queryApi.queryRows does not actually return anything, so it is not possible to do something like:
...
complete() {
console.log('\nFinished SUCCESS')
return data;
}
}
/** Execute a query and receive line table metadata and rows. */
var result = queryApi.queryRows(fluxQuery, fluxObserver);
return result;
So how can I return the data collected by the fluxObserver?
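One common pattern is to accumulate the rows in an array and wrap queryRows in a Promise that resolves from complete(). A minimal sketch, using only the observer callbacks shown above (queryToArray is an illustrative helper name, not part of the client API):
function queryToArray(queryApi, fluxQuery) {
    return new Promise((resolve, reject) => {
        const rows = []
        queryApi.queryRows(fluxQuery, {
            next(row, tableMeta) {
                // accumulate each row as a plain object
                rows.push(tableMeta.toObject(row))
            },
            error(error) {
                // propagate query failures to the caller
                reject(error)
            },
            complete() {
                // hand the collected rows back through the Promise
                resolve(rows)
            },
        })
    })
}
// Usage:
// const result = await queryToArray(queryApi, fluxQuery)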
I am building a like-counter in a NextJS / ReactJS app, which stores a persistent, incrementable count value. The maximum count of clicks / likes is to be set to 100; the button must not increment the count beyond that value. The like-counter value is stored in FaunaDB.
I can render the value stored in the db on the front-end, but I can't get the onClick handler to update the value stored in the db.
Here is the (commented) code so far:
Here, state is declared via a useState hook.
const [likes, setLikes] = useState([]);
The updateLikes function
async function updateLikes() {
    await fetch("/api/newLikes", requestOptionsLikes)
        .then(() => getLikes())
        .catch((e) => console.log(e))
}
The options for the PUT request are defined:
const requestOptionsLikes = {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ likes })
}
A useEffect hook:
useEffect(() => {
    getLikes();
}, []);
The click function:
const handleSubmitLikes = (e) => {
    e.preventDefault();
    updateLikes();
}
The JSX that renders the clickable LIKE button and displays the value from the db.
<div>
    <div onClick={handleSubmitLikes}>LIKE</div>
    {likes.map((d) => (
        <div>
            <div>{d.data.like}</div>
        </div>
    ))}
</div>
The code correctly fetches the likes from the DB, so I think there is no need to show the getLikes API. However, the newLikes API returns a 500 Internal Server Error, so I show it below:
const faunadb = require("faunadb");
// your secret hash
const secret = process.env.FAUNADB_SECRET_KEY;
const q = faunadb.query;
const client = new faunadb.Client({ secret });
console.log("CALLED API")
module.exports = async (req, res) => {
    const formData = req.body.data;
    console.log("API CALL");
    try {
        const dbs = await client.query(
            console.log("API UPDATE"),
            console.log(formData),
            q.Update(
                q.Ref(q.Collection('Likes'), '305446225474224651'),
                {
                    data: {
                        like: formData.likes[0].data.like + 1
                    },
                }
            )
        );
        // ok
        res.status(200).json(dbs.data);
    } catch (e) {
        // something went wrong
        res.status(500).json({ error: e.message });
    }
};
Basically, I can't update the database via the PUT method.
Your demonstrated code appears to increment the value provided in the PUT request, which potentially allows the client to specify any value. There is also no evidence that you are capping the count at 100.
You can perform the counter increment entirely in FQL with a query like this:
q.Let(
    {
        counterRef: q.Ref(q.Collection('Likes'), '305446225474224651'),
        counterDoc: q.Get(q.Var('counterRef')),
        counterVal: q.Select(['data', 'like'], q.Var('counterDoc')),
        newCounterVal: q.If(
            q.GTE(q.Var('counterVal'), 100),
            100,
            q.Add(q.Var('counterVal'), 1)
        ),
        update: q.If(
            q.Equals(q.Var('counterVal'), q.Var('newCounterVal')),
            null,
            q.Update(
                q.Var('counterRef'),
                { data: { like: q.Var('newCounterVal') } }
            )
        )
    },
    q.Var('newCounterVal')
)
This uses Let to bind named values so they are easy to re-use. newCounterVal is set to the incremented counterVal only when counterVal is less than 100. Updating the counter only happens when the counter actually changes (i.e. for values less than 100). The query returns the value of newCounterVal.
Ideally, you'd embed this query in a user-defined function (UDF). Whenever your rule to cap counts at 100 needs to change, only the UDF needs to be updated, and not every location in your application code where 100 might appear.
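A sketch of that approach (the function name increment_like and the Lambda parameter are illustrative; run the CreateFunction query once as setup):
client.query(
    q.CreateFunction({
        name: 'increment_like',
        body: q.Query(
            q.Lambda(
                'ref',
                q.Let(
                    {
                        counterVal: q.Select(['data', 'like'], q.Get(q.Var('ref'))),
                        newCounterVal: q.If(
                            q.GTE(q.Var('counterVal'), 100),
                            100,
                            q.Add(q.Var('counterVal'), 1)
                        ),
                        update: q.If(
                            q.Equals(q.Var('counterVal'), q.Var('newCounterVal')),
                            null,
                            q.Update(q.Var('ref'), { data: { like: q.Var('newCounterVal') } })
                        )
                    },
                    q.Var('newCounterVal')
                )
            )
        )
    })
)
// Application code then only needs to call the UDF:
client.query(
    q.Call(q.Function('increment_like'), q.Ref(q.Collection('Likes'), '305446225474224651'))
)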
This was my solution. I found the FaunaDB forums helpful for issues like this and related ones. The solution below increments the value by 1.
q.Update(
    q.Ref(q.Collection('Likes'), '__ref_id__'),
    {
        data: {
            like: q.Add(
                q.Select(
                    ['data', 'like'],
                    q.Get(
                        q.Ref(
                            q.Collection('Likes'),
                            '__ref_id__'
                        )
                    )
                ),
                1
            )
        }
    },
)
The counter button itself, written in NextJS / ReactJS, disables the onClick handler once the value reaches 100:
<div onClick={d.data.like < 100 ? handleSubmitLikes : null}>LIKE</div>
handleSubmitLikes accesses the API.
I have two problems regarding transactions that need to be solved in JavaScript.
Let's say some transaction requests are going to happen at 2s, 5s, 6s, 9s, 12s, and 16s.
I need to write a simple function that ensures there is at least a 5-second interval between two accepted requests and ignores the others. In this example, only the transactions at 2s, 9s, and 16s are accepted.
The other problem is writing a function that only accepts the request closest to each 5-second mark (0s, 5s, 10s, 15s, ...) and ignores the others. This time, the function should accept the transactions at 2s, 5s, 9s, and 16s and ignore the others.
setTimeout(() => transaction(2), 2000);
setTimeout(() => transaction(5), 5000);
setTimeout(() => transaction(6), 6000);
setTimeout(() => transaction(9), 9000);
setTimeout(() => transaction(12), 12000);
setTimeout(() => transaction(16), 16000);
// first problem: print 2, 9, 16
// second problem: print 2, 5, 9, 16
I have an idea of solving these two problems using setTimeout/setInterval/closures, but I'm not sure how to integrate them together.
Use RxJS's debounceTime:
https://www.learnrxjs.io/operators/filtering/debouncetime.html
Discard emitted values that take less than the specified time between output
let transactions$ = getObservableForTransactions();
transactions$
    .pipe(
        debounceTime( 5000 )
    )
    .subscribe( txn => console.log( txn ) );
Demonstration
For your use-case specifically, I don't know where your "transactions" are coming from - but here's a more fleshed-out example using debounceTime:
// Step 0: Imports (assuming RxJS 6+):
// import { Subject } from 'rxjs';
// import { debounceTime } from 'rxjs/operators';
// Step 1: Input:
let input = [
    { value: 2, delay: 2000 },
    { value: 5, delay: 5000 },
    { value: 6, delay: 6000 },
    { value: 9, delay: 9000 },
    { value: 12, delay: 12000 },
    { value: 16, delay: 16000 }
];
// Step 2: Set up an Observable Subject:
let obs$ = new Subject();
// Set up an initial anonymous subscriber to print all values (without debouncing):
obs$.subscribe( {
    next: v => console.log( 'value emitted: %o', v )
} );
// Step 3: Set up the debounced subscriber:
obs$.pipe( debounceTime( 5000 ) )
    .subscribe( { next: v => console.log( 'Debounced value emitted: %o', v ) } );
// Step 4: Spam `setTimeout` to trigger the process:
for( const item of input ) {
    setTimeout( () => obs$.next( item.value ), item.delay );
}
// Step 5: Open your browser console and view the results in real-time.
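If you'd rather not pull in RxJS, the first behavior (at least 5 seconds between accepted transactions) can also be sketched with a plain closure. Strictly speaking this is a throttle-style gate rather than a debounce:
// Sketch: accept a transaction only if at least intervalMs have passed
// since the last accepted one; silently ignore the rest.
function makeGatedTransaction(fn, intervalMs) {
    let lastAccepted = -Infinity;
    return function (value) {
        const now = Date.now();
        if (now - lastAccepted >= intervalMs) {
            lastAccepted = now;
            fn(value);
        }
    };
}

const transaction = makeGatedTransaction(v => console.log(v), 5000);
setTimeout(() => transaction(2), 2000);
setTimeout(() => transaction(5), 5000);
setTimeout(() => transaction(6), 6000);
setTimeout(() => transaction(9), 9000);
setTimeout(() => transaction(12), 12000);
setTimeout(() => transaction(16), 16000);
// prints 2, 9, 16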
Wasted 3 hours already trying to run a transaction in Firebase, but it always fails with maxretry.
Basically, my flow is:
On the client:
1. Call a Firebase HTTP cloud function using the JS SDK.
On the server:
2. Read data from the database.
3. Update/invalidate the data if needed.
4. Write the data back using a transaction.
The problem: step 4 always fails with maxretry.
The simplified code:
exports.myMethod2 = functions.https.onCall((data, context) => {
    utilsModule.throwIfNoAuth(context);
    return methodWithTransaction(context, admin);
});
exports.methodWithTransaction = function (context, admin) {
    //this is called only once from outside!
    var rootRef = admin.database().ref();
    return rootRef.child("my/data/path").once("value")
        .then(function (indexRef) {
            var usageObj = indexRef.val();
            //update the object's state
            usageObj = {
                timestampUtcArray: [1530656089821, 1530656089822],
                itemsCount: 2,
                writeTimestamp: 1530656502759
            };
            return usageObj;
        })
        .then(function (usageObj) {
            //here is the sad part...
            return rootRef.child("my/data/path").transaction(data => {
                //this callback always runs 25 times and fails.
                //data always != usageObj, even though across the calls usageObj stays the same and data stays the same:
                // usageObj =
                // {
                //     timestampUtcArray: [1530656089821, 1530656089822],
                //     itemsCount: 2,
                //     writeTimestamp: 1530656502759 <-- only the timestamp is different
                // }
                // data =
                // {
                //     timestampUtcArray: [1530656089821, 1530656089822],
                //     itemsCount: 2,
                //     writeTimestamp: 1530656503421 <-- only the timestamp is different
                // }
                return usageObj;
            });
        });
};
Any input is much appreciated, as this seems to be a trivial task but I'm just stuck...
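For reference (a sketch, not a verified fix for this exact case): the pattern recommended in the Firebase documentation is to derive the new value inside the transaction from its current argument, which may be null on the first attempt, instead of from a value read beforehand with once("value"):
exports.methodWithTransaction = function (context, admin) {
    var rootRef = admin.database().ref();
    return rootRef.child("my/data/path").transaction(current => {
        // `current` may be null on the first run; handle that case
        var usage = current || { timestampUtcArray: [], itemsCount: 0 };
        usage.timestampUtcArray.push(Date.now());
        usage.itemsCount = usage.timestampUtcArray.length;
        usage.writeTimestamp = Date.now();
        return usage; // returning undefined would abort the transaction
    });
};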
I'm using npm-twit to get followers of a specific account.
The Twitter API returns up to 5000 results from a single GET request.
If the user I'm querying has over 5000 followers a "next_cursor" value is returned with the data.
To get the next 5000 results, I need to re-run the GET function, passing it the "next_cursor" value as an argument. I just can't seem to work out how to do it.
I was thinking of a while loop, but I can't reset the global variable, I think because of scope:
var cursor = -1
while ( cursor != 0 ) {
    T.get('followers/ids', { screen_name: 'twitter' }, function (err, data, response) {
        // Do stuff here to write data to a file
        cursor = data["next_cursor"];
    })
}
Obviously I'm not a JS genius, so any help would be much appreciated.
The issue you are having is due to Node.js being asynchronous.
T.get('followers/ids', { screen_name: 'twitter' }, function getData(err, data, response) {
    // Do stuff here to write data to a file
    if (data['next_cursor'] > 0) T.get('followers/ids', { screen_name: 'twitter', next_cursor: data['next_cursor'] }, getData);
})
Please note:
I gave the internal callback function a name so that we can call it recursively from the inside.
The loop is replaced with a recursive callback.
If there is a next_cursor data, then we call T.get using the same function getData.
Be aware that the Do stuff here code will be executed many times (as many times as there are next cursors). Since it is a recursive callback, the order is guaranteed.
If you do not like the idea of recursive callbacks, you can avoid it by:
Finding out all the next_cursor values beforehand, if possible, and generating the requests using a for loop.
Alternatively, use asynchronous-helper modules like Async (though for learning purposes, I would avoid modules unless you are fluent in the concept already).
Consider testing with an account that has 5K+ followers.
const T = new Twit(tokens)

// Recursively page through followers/ids, accumulating ids until next_cursor is 0
function getFollowers (screenName, followers = [], cur = -1) {
  return new Promise((resolve, reject) => {
    T.get('followers/ids', { screen_name: screenName, cursor: cur, count: 5000 }, (err, data, response) => {
      if (err) {
        cur = -1
        reject(err)
      } else {
        cur = data.next_cursor
        followers.push(data.ids)
        if (cur > 0) {
          // more pages remain: recurse with the new cursor
          return resolve(getFollowers(screenName, followers, cur))
        } else {
          // done: flatten the array of id arrays into a single list
          return resolve([].concat(...followers))
        }
      }
    })
  })
}

async function getXaqron () {
  let result = await getFollowers('xaqron')
  return result
}

getXaqron()
  .then((result) => console.log(result))
  .catch((err) => {
    console.log(err) // Rate limit exceeded
  })
Struggled with this one... Everything seemed to work, but data['next_cursor'] didn't change, EVER!
Code should be like this:
T.get('followers/ids', { screen_name: 'twitter' }, function getData(err, data, response) {
    // Do stuff here to write data to a file
    if (data['next_cursor'] > 0) T.get('followers/ids', { screen_name: 'twitter', cursor: data['next_cursor'] }, getData);
})
Parameter for Twit isn't "next_cursor", it's just "cursor" ;)