How can I convert background jobs to something like functions? - javascript

I am making a newsreader app and using Parse.com background jobs to collect links from the RSS feeds of the newspapers. I have used xmlreader.js and sax.js to parse the httpResponse and, using saveAll and beforeSave, I periodically update the classes in the data browser.
I have multiple newspapers with multiple categories, making a total of more than 30 pairs (I would later have to include more pairs, as I would like to add regional newspapers). Until now I have been working with one newspaper and one category - The Hindu, sports category - and it is now working fine. Making copies of these two functions and creating separate jobs for each pair won't be efficient, I think.
Therefore, I wanted to ask whether I can convert both the job and the beforeSave into some kind of function, so that I can just pass in either the newspaper-category pair's class name or its URL and have everything done automatically.
Full Code - main.js
job -
Parse.Cloud.job("job_hindu_sports", function (request, response) {
return Parse.Cloud.httpRequest({
url: 'http://www.thehindu.com/sport/?service=rss'
}).then(function(httpResponse) {
var someXml = httpResponse.text;
xmlreader.read(someXml, function (err, res){
if(err) {
response.error("Error " +err);
return console.log(err);
}
var listArray = [];
res.rss.channel.item.each(function (i, item){
var hinduSports = new HinduSports(); //#startswithaj - this part
hinduSports.set("link", item.link.text());
hinduSports.set("title", item.title.text());
hinduSports.set("pubDate", item.pubDate.text());
//console.log("pubDate - "+ item.pubDate.text());
listArray.push(hinduSports);
});
var promises = [];
Parse.Object.saveAll(listArray, {
success: function(objs) {
promises.push(objs);
console.log("SAVED ALL!");
},
error: function(error) {
console.log("ERROR WHILE SAVING - "+error);
}
});
return Parse.Promise.when(promises);
});
}).then(function() {
response.success("Saving completed successfully.");
},function(error) {
response.error("Uh oh, something went wrong.");
});
});
beforeSave -
Parse.Cloud.beforeSave("HinduSports", function(request, response) {
//console.log("in beforeSave");
var query = new Parse.Query(HinduSports);
var linkText = request.object.get("link")
var titleText = request.object.get("title");
query.equalTo("link", linkText);
query.first({
success: function(object) {
//console.log("in query");
if (object) {
//console.log("found");
if(object.get('title')!==titleText){
console.log("title not same");
object.set("title", titleText);
response.success();
}
else{
console.log("title same");
response.error();
}
} else {
console.log("not found");
response.success();
}
},
error: function(error) {
response.error();
}
});
});

In your job code you could query your datastore for all of the URLs you want to process, then iterate through the results, requesting each URL and passing the httpResponse to a function that does all the work.
So you would have something like this (pseudocode, tidied into runnable form):
function getDataForNewspaper(id, url) {
    return Parse.Cloud.httpRequest({
        url: url
    }).then(function(httpResponse) {
        // id is captured by the enclosing function's closure, so it can
        // be passed along to processDataForNewspaper
        return processDataForNewspaper(id, httpResponse);
    });
}

function processDataForNewspaper(id, httpResponse) {
    var someXml = httpResponse.text;
    // process your xml here
}

Parse.Cloud.job("get_data_for_all_newspapers", function (request, response) {
    var query = new Parse.Query("Newspapers"); // placeholder: one row per newspaper
    query.find({
        success: function(list) {
            list.forEach(function(newspaper) {
                getDataForNewspaper(newspaper.id, newspaper.get("url"));
            });
        }
    });
});
It's not the best explanation but I hope this helps
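One refinement worth noting, sketched under the assumption of a Newspapers class with a url column (both placeholder names): if each getDataForNewspaper call returns its promise, the job can wait for every feed to finish before reporting success, instead of ending while requests are still in flight.

Parse.Cloud.job("get_data_for_all_newspapers", function (request, response) {
    var query = new Parse.Query("Newspapers"); // placeholder class name
    query.find().then(function(list) {
        // Build one promise per newspaper; each resolves when its
        // feed has been fetched, parsed, and saved.
        var promises = list.map(function(newspaper) {
            return getDataForNewspaper(newspaper.id, newspaper.get("url"));
        });
        // Wait for every feed to finish before ending the job.
        return Parse.Promise.when(promises);
    }).then(function() {
        response.success("All newspapers processed.");
    }, function(error) {
        response.error("Failed: " + error.message);
    });
});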

With the help from #startswithaj I modified my code to save all the articles in one class. The only thing left is to add a beforeSave method. But there is still a problem: saveAll completes only sometimes. For example, I ran the code the first time and got this in the log:
I2014-04-26T18:18:40.036Z] v93: Ran job job_get_data_for_all_newspapers with:
Input: {}
Result: Saving completed successfully.
I2014-04-26T18:18:40.926Z] Successfully retrieved 2
I2014-04-26T18:18:40.926Z] getData NEW & CAT ID - 1, 5 feedUrl http://www.thehindu.com/sport/?service=rss
I2014-04-26T18:18:40.927Z] getData NEW & CAT ID - 1, 4 feedUrl http://www.thehindu.com/news/national/?service=rss
I2014-04-26T18:18:40.927Z] promisesGetNP [object Object],[object Object]
I2014-04-26T18:18:41.479Z] processData NEW & CAT ID - 1, 5
I2014-04-26T18:18:41.622Z] listArray http://www.thehindu.com/sport/other-sports/mankirat-singh-sets-record/article5951540.ece?utm_source=RSS_Feed&utm_medium=RSS&utm_campaign=RSS_Syndication
I2014-04-26T18:18:41.628Z] promises undefined
I2014-04-26T18:18:41.629Z] promisesGetData
I2014-04-26T18:18:41.629Z] Done getData?
I2014-04-26T18:18:42.082Z] processData NEW & CAT ID - 1, 4
I2014-04-26T18:18:42.311Z] listArray http://www.thehindu.com/news/national/muslim-women-entitled-to-maintenance-even-after-divorce-supreme-court/article5951562.ece?utm_source=RSS_Feed&utm_medium=RSS&utm_campaign=RSS_Syndication
I2014-04-26T18:18:42.324Z] promises undefined
I2014-04-26T18:18:42.324Z] promisesGetData
I2014-04-26T18:18:42.324Z] Done getData?
I2014-04-26T18:18:42.324Z] done job
and the second time, after deleting a few useless console.log calls, I got this. You can see there is a SAVED ALL!, which is logged in the success: callback of saveAll -
I2014-04-26T18:20:53.130Z] v94: Ran job job_get_data_for_all_newspapers with:
Input: {}
Result: Saving completed successfully.
I2014-04-26T18:20:53.307Z] Successfully retrieved 2
I2014-04-26T18:20:53.307Z] getData NEW & CAT ID - 1, 5 feedUrl http://www.thehindu.com/sport/?service=rss
I2014-04-26T18:20:53.307Z] getData NEW & CAT ID - 1, 4 feedUrl http://www.thehindu.com/news/national/?service=rss
I2014-04-26T18:20:53.911Z] processData NEW & CAT ID - 1, 5
I2014-04-26T18:20:53.951Z] listArray http://www.thehindu.com/sport/other-sports/mankirat-singh-sets-record/article5951540.ece?utm_source=RSS_Feed&utm_medium=RSS&utm_campaign=RSS_Syndication
I2014-04-26T18:20:53.995Z] Done getData?
I2014-04-26T18:20:54.200Z] SAVED ALL!
I2014-04-26T18:20:54.818Z] processData NEW & CAT ID - 1, 4
I2014-04-26T18:20:55.016Z] listArray http://www.thehindu.com/news/national/muslim-women-entitled-to-maintenance-even-after-divorce-supreme-court/article5951562.ece?utm_source=RSS_Feed&utm_medium=RSS&utm_campaign=RSS_Syndication
I2014-04-26T18:20:55.031Z] Done getData?
I2014-04-26T18:20:55.031Z] done job
My new code can be found here. The new code starts at Line 150.
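Judging from the "promises undefined" lines in the first log, the job is waiting on an empty array: Parse.Promise.when(promises) runs before saveAll's success callback has pushed anything into promises. A hedged sketch of the usual fix, returning a promise that resolves only when saveAll itself finishes (Article is a placeholder class name):

function processDataForNewspaper(id, httpResponse) {
    var promise = new Parse.Promise();
    xmlreader.read(httpResponse.text, function (err, res) {
        if (err) {
            return promise.reject(err);
        }
        var listArray = [];
        res.rss.channel.item.each(function (i, item) {
            var article = new Article(); // placeholder class
            article.set("link", item.link.text());
            article.set("title", item.title.text());
            article.set("pubDate", item.pubDate.text());
            listArray.push(article);
        });
        // saveAll returns a promise; resolve ours only when it finishes.
        Parse.Object.saveAll(listArray).then(function(objs) {
            console.log("SAVED ALL!");
            promise.resolve(objs);
        }, function(error) {
            promise.reject(error);
        });
    });
    return promise;
}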

Related

How to avoid async.waterfall "TypeError: Cannot read property of undefined"?

I am working with an async.waterfall and am unsure how to avoid: "TypeError: Cannot read property 'MediaUrl' of undefined". This TypeError doesn't occur every time the script is run.
The flow is as follows:
getName (randomly chooses a name from a list of names)
searchImage (uses the Bing search API to search photographs associated with that name)
processBotdata (takes the results from Bing and randomly chooses one of the search results)
Step 3 is where the issue occurs:
function processBotdata (searchData, callback) {
    var photographer = searchData.photographer; // the search name
    var array = searchData.array; // search results from bing
    var randomIndex = Math.floor(Math.random() * array.length);
    var mediaUrl = array[randomIndex].MediaUrl; // TypeError here!
    var sourceUrl = array[randomIndex].SourceUrl;
    var searchData = {
        photographer,
        mediaUrl,
        sourceUrl
    };
    fs.readFile('results.json', function (err, data) {
        var json = JSON.parse(data);
        json.push(['search results for ' + photographer + ': ',
            'mediaUrl: ' + searchData.mediaUrl,
            'sourceUrl: ' + searchData.sourceUrl]);
        fs.writeFile("results.json", JSON.stringify(json, null, "\t"));
        console.log(' ==========> searchData appended to results.json file...');
    });
    console.log(' ==========> searchData has now been processed for upcoming tweet...');
    setTimeout(function() {
        callback(null, searchData);
        console.log(searchData);
    }, 5000);
}
I implemented a setTimeout for this function hoping that would resolve the issue. My thinking was that the Bing results searchData.array was not yet available to be processed, i.e. randomized and selected in this function. As I am new to Node.js and JavaScript I am unsure of my programming error here. I saw this post and I am wondering if it is related to the Bing Search Array which returns the top 50 results.
Update: here is how the async.waterfall is called:
async.waterfall([
    getName,
    async.retryable({times: 5, interval: 500}, searchImage),
    async.retryable({times: 3, interval: 1000}, processBotdata),
    async.retryable({times: 3, interval: 500}, getImage)
],
function(error, result) {
    if (error) {
        console.error(error);
        return;
    }
});
The most obvious possibility is that there is no object at the array index that you're getting. Since you're going after a random index, it makes sense that some random numbers are hitting populated indices, others are not. Is there any chance that the array is sparse or that the random number you're generating is out of range despite your attempts with Math.floor?
I'd check first in any case, something like:
var obj = array[randomIndex];
if (obj) {
    // do your stuff
}
/*** EDIT FOR NEW INFO ****/
Based on the comment, there's a chance that the Bing results will be an empty array, so the first thing you want to do is to test if that's the case and 'fail fast' to let async.retryable know it should retry:
function processBotData(searchData, callback) {
    if (!searchData || !searchData.array || !searchData.array.length) {
        return callback(new Error('No search results'));
    }
    // continue as before from here
}
Note that some folks would prefer my last condition to be something like searchData.array.length > 0, but 0 evaluates as false, so I do it this way.

How to query objects in the CloudCode beforeSave?

I'm trying to compare a new object with the original using the Cloud Code beforeSave function. I need to compare a field sent in the update with the existing value. The problem is that I can't fetch the object correctly. When I run the query, I always get the value from the sent object.
UPDATE: I tried a different approach and could get the old record (the one already saved in Parse). But the new one, sent in the request, was overridden by the old one. WHAT?! Another issue is that, even though the code sent a response.success(), the update wasn't saved.
I believe that I'm missing something pretty obvious here. Or I'm facing a bug or something...
NEW APPROACH
Parse.Cloud.beforeSave('Tasks', function(request, response) {
    if (!request.object.isNew()) {
        var Task = Parse.Object.extend("Tasks");
        var newTask = request.object;
        var oldTask = new Task();
        oldTask.set("objectId", request.object.id);
        oldTask.fetch()
            .then(function(oldTask) {
                console.log(">>>>>> Old Task: " + oldTask.get("name") + " version: " + oldTask.get("version"));
                console.log("<<<<<< New Task: " + newTask.get("name") + " version: " + newTask.get("version"));
                response.success();
            }, function(error) {
                response.error(error.message);
            });
    }
});
OBJ SENT {"name":"LLL", "version":333}
LOG
I2015-10-02T22:04:07.778Z]v175 before_save triggered for Tasks for user tAQf1nCWuz:
Input: {"original":{"createdAt":"2015-10-02T17:47:34.143Z","name":"GGG","objectId":"VlJdk34b2A","updatedAt":"2015-10-02T21:57:37.765Z","version":111},"update":{"name":"LLL","version":333}}
Result: Update changed to {}
I2015-10-02T22:04:07.969Z]>>>>>> Old Task: GGG version: 111
I2015-10-02T22:04:07.970Z]<<<<<< New Task: GGG version: 111
NOTE: I'm testing the login via cURL and in the parse console.
CloudCode beforeSave
Parse.Cloud.beforeSave("Tasks", function( request, response) {
var query = new Parse.Query("Tasks");
query.get(request.object.id)
.then(function (oldObj) {
console.log("-------- OLD Task: " + oldObj.get("name") + " v: " + oldObj.get("version"));
console.log("-------- NEW Task: " + request.object.get("name") + " v: " + request.object.get("version"));
}).then(function () {
response.success();
}, function ( error) {
response.error(error.message);
}
);
});
cURL request
curl -X PUT \
-H "Content-Type: application/json" \
-H "X-Parse-Application-Id: xxxxx" \
-H "X-Parse-REST-API-Key: xxxxx" \
-H "X-Parse-Session-Token: xxxx" \
-d "{\"name\":\"NEW_VALUE\", \"version\":9999}" \
https://api.parse.com/1/classes/Tasks/VlJdk34b2A
JSON Response
"updatedAt": "2015-10-02T19:45:47.104Z"
LOG
The log prints the original and the new value, but I don't know how to access it either.
I2015-10-02T19:57:08.603Z]v160 before_save triggered for Tasks for user tAQf1nCWuz:
Input: {"original":{"createdAt":"2015-10-02T17:47:34.143Z","name":"OLD_VALUE","objectId":"VlJdk34b2A","updatedAt":"2015-10-02T19:45:47.104Z","version":0},"update":{"name":"NEW_VALUE","version":9999}}
Result: Update changed to {"name":"NEW_VALUE","version":9999}
I2015-10-02T19:57:08.901Z]-------- OLD Task: NEW_VALUE v: 9999
I2015-10-02T19:57:08.902Z]-------- NEW Task: NEW_VALUE v: 9999
After a lot of trial and error I could figure out what was going on.
Turns out that Parse merges any objects with the same class and id into one instance. That was the reason why I always got either the object registered in the DB or the one sent by the user. I honestly can't make sense of such behavior, but anyway...
The Parse JavaScript SDK offers a method called Parse.Object.disableSingleInstance that disables this "feature". But, once the method is called, all objects already defined become undefined. That includes the sent object. Which means that you can't even keep the sent object around for later reference.
The only option was to save the keys and values of the sent object and recreate it later. So I needed to capture the request before calling disableSingleInstance, transform it into JSON, then disable single instance, fetch the object saved in the DB, and recreate the sent object using the saved JSON.
It's not pretty and definitely isn't the most efficient code, but I couldn't find any other way. If someone out there has another approach, by all means tell me.
Parse.Cloud.beforeSave('Tasks', function(request, response) {
    if (!request.object.isNew()) {
        var id = request.object.id;
        var jsonReq;
        var Task = Parse.Object.extend("Tasks");
        var newTask = new Task();
        var oldTask = new Task();
        // getting new Obj
        var queryNewTask = new Parse.Query(Task);
        queryNewTask.get(id)
            .then(function(result) {
                newTask = result;
                // Saving values as JSON for later reference
                jsonReq = result.toJSON();
                // Disable the merge of objects w/same class and id.
                // It will also undefine all Parse objects,
                // including the one sent in the request.
                Parse.Object.disableSingleInstance();
                // getting the object saved in the DB
                oldTask.set("objectId", id);
                return oldTask.fetch();
            }).then(function(result) {
                oldTask = result;
                // Recreating the new Task that was sent
                for (var key in jsonReq) {
                    newTask.set(key, jsonReq[key]);
                }
                // Do your job here
            }, function(error) {
                response.error(error.message);
            });
    }
});
If I were you, I would pass in the old value as a parameter to the cloud function so that you can access it under request.params.(name of parameter). I don't believe that there is another way to get the old value. An old SO question said that you can use .get(), but you're claiming that that is not working. Unless you actually already had 9999 in the version...
edit - I guess beforeSave isn't called like a normal function... so create an "update version" function that passes in the current Task and the version you're trying to update to, perhaps?
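A minimal sketch of that idea, assuming a custom Cloud function named updateVersion (the function name and parameter names are placeholders): the client sends the task's id along with the old version it believes is current, so the server-side code can compare without querying pre-save state.

// Hypothetical cloud function; names and params are illustrative only.
Parse.Cloud.define("updateVersion", function(request, response) {
    var taskId = request.params.taskId;
    var oldVersion = request.params.oldVersion; // value the client last saw
    var newVersion = request.params.newVersion;
    var query = new Parse.Query("Tasks");
    query.get(taskId).then(function(task) {
        if (task.get("version") !== oldVersion) {
            // Someone else changed it since the client last read it.
            return Parse.Promise.error("version conflict");
        }
        task.set("version", newVersion);
        return task.save();
    }).then(function() {
        response.success();
    }, function(error) {
        response.error(error.message || error);
    });
});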
Rather than performing a query, you can see the modified attributes by checking which keys are dirty, meaning they have been changed but not saved yet.
The JS SDK includes dirtyKeys(), which returns the keys that have been changed. Try this out.
var attributes = request.object.attributes;
var changedAttributes = new Array();
for (var attribute in attributes) {
    if (request.object.dirty(attribute)) {
        changedAttributes.push(attribute);
        // request.object.get(attribute) is changed and the key is pushed to the array
    }
}
For clarification, to get the original attribute's value, you will have to call get() to load those pre-save values. It should be noted that this will count as another API request.
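For instance, a hedged sketch combining dirtyKeys() with such a fetch (note that the single-instance merging discussed elsewhere in this thread may interfere with what the query returns on hosted Parse):

Parse.Cloud.beforeSave("Tasks", function(request, response) {
    var dirtyKeys = request.object.dirtyKeys();
    if (request.object.isNew() || dirtyKeys.length === 0) {
        return response.success();
    }
    // Fetch the currently-stored version to read the pre-save values.
    var query = new Parse.Query("Tasks");
    query.get(request.object.id).then(function(stored) {
        dirtyKeys.forEach(function(key) {
            console.log(key + ": " + stored.get(key) + " -> " + request.object.get(key));
        });
        response.success();
    }, function(error) {
        response.error(error.message);
    });
});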
Hey, this worked perfectly for me:
var dirtyKeys = request.object.dirtyKeys();
var query = new Parse.Query("Question");
var clonedData = null;
query.equalTo("objectId", request.object.id);
query.find().then(function(data) {
    var clonedPatch = request.object.toJSON();
    clonedData = data[0];
    clonedData = clonedData.toJSON();
    console.log("this is the data : ", clonedData, clonedPatch, dirtyKeys);
    response.success();
}).then(null, function(err) {
    console.log("the error is : ", err);
});
For those coming to this thread in 2021-ish: if you have the server data loaded in the client SDK before you save, you can resolve this issue by passing that server data from the client SDK in the context option of the save() function, and then using it in the beforeSave/afterSave cloud functions.
// eg JS client sdk
const options = {
    context: {
        before: doc._getServerData() // object data, as loaded
    }
};
doc.save(null, options);

// #beforeSave cloud fn
Parse.Cloud.beforeSave(className, async (request) => {
    const { before } = request.context;
    // ... do something with before ...
});
Caveat: this wouldn't help you if the attributes weren't already loaded into _getServerData() on the client.
Second caveat: Parse will not handle (un)serialization for you in your cloud function, e.g.:
{
    before: { // < posted as context
        status: {
            is: 'atRisk',
            comment: 'Its all good now!',
            at: '2021-04-09T15:39:04.907Z', // string
            by: [Object] // pojo
        }
    },
    after: {
        status: { // < posted as doc's save data
            is: 'atRisk',
            comment: 'Its all good now!',
            at: 2021-04-09T15:39:04.907Z, // instanceOf Date
            by: [ParseUser] // instanceOf ParseUser
        }
    }
}
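In other words, values read from request.context arrive as plain JSON; if you need real types back, you revive them yourself. A small hedged sketch, reusing the status.at field from the example above:

Parse.Cloud.beforeSave(className, async (request) => {
    const { before } = request.context;
    if (before && before.status) {
        // Revive the ISO string back into a Date for comparison.
        const beforeAt = new Date(before.status.at);
        const afterAt = request.object.get('status').at; // still a real Date
        if (afterAt.getTime() !== beforeAt.getTime()) {
            // ... the timestamp changed between load and save ...
        }
    }
});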

How to get and append most recent messages from server using jQuery and AJAX?

I'm working on my first simple chat application and this issue has me stuck. I know what I'm trying to do, but I end up overthinking it.
Basically, I have this heroku server going:
http://tiy-fee-rest.herokuapp.com/collections/blabberTalk
Whenever someone sends a message, it is added to this array.
My Issue:
I have it on a set interval so that every 2 seconds, it runs the getNewestMessages function. When this setInterval is working and someone sends a message, it will keep appending the last message they sent every 2 seconds. If I disable the setInterval and simply call the getNewestMessages function myself in a separate browser tab, this doesn't seem to happen. I want to make it so that the most recently sent message isn't constantly re-appended to the DOM when the setInterval is active.
This is the function I'm using to check for recent messages. It's pretty bloated, sorry about that:
getNewestMessages: function() {
    $.ajax({
        url: 'http://tiy-fee-rest.herokuapp.com/collections/blabberTalk',
        method: 'GET',
        success: function (data) {
            // Finds Id of most recent message displayed in the DOM
            var recentId = $('.message').last().data('id');
            var prevMostRecent = 0;
            var newMostRecent = [];
            jQuery.each(data, function(idx, el) {
                if (el._id === recentId) {
                    // if one of the messages on the server has an Id equal to
                    // one of the messages in the DOM, it saves its index in a var
                    prevMostRecent = idx;
                }
            });
            jQuery.each(data, function(idx, el) {
                if (idx < prevMostRecent) {
                    // if there are messages on the server with a lower index than
                    // the most recent message in the DOM, it pushes them to a new
                    // array. Basically, creates a new array of any messages newer
                    // than the last one displayed in the DOM.
                    newMostRecent.push(el);
                }
            });
            for (var i = 0; i < newMostRecent.length; i++) {
                console.log(newMostRecent[i]);
                if (newMostRecent[i]._id === $('.message').last().data('id')) {
                    // My attempt at trying to remove the last DOM message from
                    // the array of newer messages. My main issue was that this
                    // whole function would keep appending the most recent message
                    // over and over again.
                    var result = _.without(newMostRecent, newMostRecent[i]);
                    console.log('MESSAGE TO BE EXCLUDED: ', newMostRecent[i]);
                    // If the array of newer messages contained the most recent
                    // DOM message, it removes it and sends it to be appended.
                    page.appendNewestMessages(result);
                }
            }
            // If the array of newer messages DOESN'T contain the most recent
            // DOM message, it just sends the whole array normally.
            page.appendNewestMessages(newMostRecent);
        },
        error: function (err) {
        }
    });
}
Here is the append function:
appendNewestMessages: function(messagesToAppend) {
    console.log(messagesToAppend.reverse());
    _.each(messagesToAppend.reverse(), function(el, idx, arr) {
        var newMessage = {
            content: el.content,
            timestamp: el.timestamp,
            author: el.author,
            userIcon: el.userIcon
        };
        $.ajax({
            url: page.url,
            method: 'POST',
            data: newMessage,
            success: function (data) {
                page.addOneMessageToDOM(data);
            },
            error: function (err) {
                console.log("error ", err);
            }
        });
    });
}
Can anyone help me understand how to get the most recent messages from a server and append them to the DOM without any repeats? This has been driving me nuts.
Thanks for any and all help.
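One common de-duplication approach, sketched here under the assumption that each message carries a unique _id (which the code above already relies on): keep track of the ids already rendered and skip anything seen before, appending straight to the DOM. Note that appendNewestMessages above POSTs each message back to the server, which itself creates new copies; appending directly, as below, avoids that.

// Hedged sketch: dedupe by _id before touching the DOM.
var renderedIds = {};

function appendIfNew(messages) {
    _.each(messages, function (msg) {
        if (renderedIds[msg._id]) {
            return; // already in the DOM, skip it
        }
        renderedIds[msg._id] = true;
        page.addOneMessageToDOM(msg);
    });
}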

Parse afterSave function getting skipped over

So I have a messaging app using Parse.com as my backend. When I send a message from the app, it is saved on Parse.com to a class called "NewMessages". Then in my cloud code I have an afterSave function dedicated to this class, so that when a new object gets saved to "NewMessages" it picks a random user, attaches it to the message, and saves it in a new class called "Inbox". Then it deletes the original message from "NewMessages".
So the "NewMessages" class should always be empty, right? But when I send a bunch of messages very quickly, some get skipped over. How do I fix this?
Is there a better way to structure this than using afterSave?
function varReset(leanBody, leanSenderName, leanSenderId, randUsers) {
    leanBody = "";
    leanSenderName = "";
    leanSenderId = "";
    randUsers = [];
    console.log("The variables were set");
}

Parse.Cloud.afterSave("Lean", function(leanBody, leanSenderName, leanSenderId, randUsers, request) {
    varReset(leanBody, leanSenderName, leanSenderId, randUsers);
    var query = new Parse.Query("NewMessages");
    query.first({
        success: function(results) {
            leanBody = (results.get("MessageBody"));
            leanSenderName = (results.get("senderName"));
            leanSenderId = (results.get("senderId"));
            getUsers(leanBody, leanSenderName, leanSenderId);
            results.destroy({
                success: function(results) {
                    console.log("deleted");
                },
                error: function(results, error) {
                }
            });
        },
        error: function(error) {
        }
    });
});

function getUsers(leanBody, leanSenderName, leanSenderId, response) {
    var query = new Parse.Query(Parse.User);
    query.find({
        success: function(results) {
            var users = [];
            console.log(leanBody);
            console.log(leanSenderName);
            // extract out user names from results
            for (var i = 0; i < results.length; ++i) {
                users.push(results[i].id);
            }
            for (var i = 0; i < 3; ++i) {
                var rand = users[Math.floor(Math.random() * users.length)];
                var index = users.indexOf(rand);
                users.splice(index, 1);
                randUsers.push(rand);
            }
            console.log("The random users are " + randUsers);
            sendMessage(leanBody, leanSenderName, leanSenderId, randUsers);
        },
        error: function(error) {
            response.error("Error");
        }
    });
}

function sendMessage(leanBody, leanSenderName, leanSenderId, randUsers) {
    var Inbox = Parse.Object.extend("Inbox");
    for (var i = 0; i < 3; ++i) {
        var inbox = new Inbox();
        inbox.set("messageBody", leanBody);
        inbox.set("senderName", leanSenderName);
        inbox.set("senderId", leanSenderId);
        inbox.set("recipientId", randUsers[i]);
        console.log("leanBody = " + leanBody);
        console.log("leanSenderName = " + leanSenderName);
        console.log("leanSenderId = " + leanSenderId);
        console.log("recipient = " + randUsers[i]);
        inbox.save(null, {
            success: function(inbox) {
                // Execute any logic that should take place after the object is saved.
                alert('New object created with objectId: ' + inbox.id);
            },
            error: function(inbox, error) {
                // Execute any logic that should take place if the save fails.
                // error is a Parse.Error with an error code and message.
                alert('Failed to create new object, with error code: ' + error.message);
            }
        });
    }
}
Have you checked your logs? You may be falling afoul of resource limits (https://parse.com/docs/cloud_code_guide#functions-resource). If immediacy is not important, it may be worth setting up a background job that runs every few minutes and tackles undelivered messages.
It may also be possible to combine the two approaches: having the afterSave function attempt an immediate delivery to Inboxes, while the background job picks up any leftover NewMessages on a regular basis. Not the prettiest solution, but at least you have a bit more reliability. (You'll have to think about race conditions, though, where the two may attempt deliveries on the same NewMessage.)
Regarding your question about a better structure, if the two classes are identical (or close enough), is it possible to just have a Messages class? Initially the "to" field will be null but is assigned a random recipient on a beforeSave function. This may be faster and neater.
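A rough sketch of that single-class idea (the Messages class and "to" field are placeholders; picking from all users follows the getUsers approach above):

// Hypothetical single "Messages" class; "to" is filled in before the save.
Parse.Cloud.beforeSave("Messages", function(request, response) {
    if (request.object.get("to")) {
        return response.success(); // recipient already assigned
    }
    var query = new Parse.Query(Parse.User);
    query.find().then(function(users) {
        var pick = users[Math.floor(Math.random() * users.length)];
        request.object.set("to", pick.id);
        response.success();
    }, function(error) {
        response.error(error.message);
    });
});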
EDIT: Adding a 3rd observation which was originally a comment:
I saw that you are using Query.first() in afterSave to find the NewMessage to take care of. Potentially, a new NewMessage could have snuck in between the time afterSave was called and the time the Query was run. Why not get the ID of the saved NewMessage and use that in the Query, instead of first()?
query.get(request.object.id,...);
This ensures that the code in afterSave handles the NewMessage that it was invoked for, not the one that was most recently saved.

Getting result from querying sqlite db in the add-on script to be submitted to the content script

I am writing a modest Firefox add-on and I have some problems getting the results used inside the "flow" of the add-on script.
The code that takes care of querying an SQLite database lives in a module, but I don't know how to create a callback inside of it so that the pageMod in the add-on script can use the result and pass it to the content script.
Basically here is what I have:
main.js :
var pageMod = require("sdk/page-mod");
var self = require("sdk/self");
var myDbScript = require('./myDbScript');

pageMod.PageMod({
    include: "*.example.com/*",
    contentScriptFile: [self.data.url('jquery-1.10.2.min.js'),
                        self.data.url('myContentScript.js')],
    onAttach: function(worker) {
        // Query the database on behalf of the content script
        worker.port.on('queryTheDB', function(message) {
            // Get the data from the DB (2 is some test value here)
            // Not working because of the asynchronous querying of the DB
            var resultFromDB = myDbScript.getResult(2);
            // Send the result to the content script
            worker.port.emit('hereIsYourResult', resultFromDB);
        });
    }
});
myDBScript.js
// Get required components
var {components} = require("chrome");
components.utils.import("resource://gre/modules/FileUtils.jsm");
components.utils.import("resource://gre/modules/Services.jsm");
// Some code to get the DB
// Create statement to retrieve country based on the IP
var statement = dbConnection.createStatement("SELECT col1, col2 FROM table WHERE col1 = :given_col1");
function getResult(submittedValue) {
// Bind parameters
statement.params.given_col1 = submittedValue;
// Execute
statement.executeAsync({
handleResult: function(aResultSet) {
for (let row = aResultSet.getNextRow();
row;
row = aResultSet.getNextRow()) {
var resultFromDB = row.getResultByName("col2");
}
},
handleError: function(aError) {
print("Error: " + aError.message);
return 'error';
},
handleCompletion: function(aReason) {
if (aReason != components.interfaces.mozIStorageStatementCallback.REASON_FINISHED) {
print("Query canceled or aborted!");
return 'canceledOrAborted';
} else {
// Sending the result to the add-on script so that it can
// pass it to the content script
notifyingTheAddonScript(resultFromDB);
}
}
});
}
// Enable the use of the getResult function
exports.getResult = getResult;
The thing is that I don't see how to have the addon script be aware that the result is ready. Please bear with me, I am a noob at this...
Since I don't have the full source, I cannot test, so you'll have to fix any errors I made yourself ;)
First, let's add a callback.
// @param {function(result, error)} callback
//        Called upon query completion.
//        If |error| is a string, then the query failed.
//        Else |result| will contain an array of values.
function getResult(submittedValue, callback) { // changed
    // Bind parameters
    statement.params.given_col1 = submittedValue;
    var rv = [], err = null; // added
    // Execute
    statement.executeAsync({
        handleResult: function(aResultSet) {
            for (let row = aResultSet.getNextRow();
                 row;
                 row = aResultSet.getNextRow()) {
                rv.push(row.getResultByName("col2")); // changed
            }
        },
        handleError: function(aError) {
            print("Error: " + aError.message);
            err = aError.message; // changed
        },
        handleCompletion: function(aReason) {
            if (aReason != components.interfaces.mozIStorageStatementCallback.REASON_FINISHED) {
                print("Query canceled or aborted!");
                err = err || 'canceled or aborted'; // changed
            }
            callback(err ? null : rv, err); // replaced
        }
    });
}
Let's use this stuff now in the pageMod:
onAttach: function(worker) {
    // Query the database on behalf of the content script
    worker.port.on('queryTheDB', function(message) {
        // Get the data from the DB (2 is some test value here)
        myDbScript.getResult(2, function callback(result, error) {
            if (error) {
                worker.port.emit("hereIsYourError", error);
                return;
            }
            worker.port.emit("hereIsYourResult", result);
        });
    });
}
You might want to take some precautions not to fire multiple queries. While it would be OK to do so, it might hurt performance ;)
Since our callback already looks kinda like a promise, it might actually be a good idea to use promises, maybe even with the Sqlite.jsm module and some Task.jsm magic.
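For illustration, a hedged sketch of the promise-flavoured version using the SDK's sdk/core/promise module, wrapping the callback-style getResult from above:

var { defer } = require("sdk/core/promise");

// Wrap the callback-style getResult in a promise.
function getResultAsync(submittedValue) {
    var deferred = defer();
    getResult(submittedValue, function(result, error) {
        if (error) {
            deferred.reject(error);
        } else {
            deferred.resolve(result);
        }
    });
    return deferred.promise;
}

// In the pageMod:
getResultAsync(2).then(function(result) {
    worker.port.emit("hereIsYourResult", result);
}, function(error) {
    worker.port.emit("hereIsYourError", error);
});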
