I'm trying to delete all objects in a class, but whenever I attempt to delete objects from a cloud function or job, I get an inconsistent number of objects left over. The jobs always take under a second, so I don't think that's the issue (I'm working with under 100 objects anyway). I always seem to end up with a random number of objects left over and no errors. This is what I'm working with now.
Parse.Cloud.job("deletePosts", function(request, status) {
Parse.Cloud.useMasterKey();
var query = new Parse.Query("Posts");
query.find({
success: function(results) {
Parse.Object.destroyAll(results).then(function() {
console.log("Delete job completed.");
status.success("Delete job completed.");
});
},
error: function(error) {
console.log("Error in delete query error: " + error);
status.error("Error in delete query error: " + error);
}
});
});
When deleting objects in cloud code, use query.each instead of query.find to ensure that you delete all objects matching the query.
find returns at most 100 objects by default (or up to 1000 if limit is used), whereas each pages through every object the query matches.
Below is an example of a promise chain that calls destroy on each Post object. When all of the destroy promises have completed, the success status is set; if any of the destroys fail, the error status is set.
Parse.Cloud.job("deletePosts", function(request, status) {
Parse.Cloud.useMasterKey();
var query = new Parse.Query("Posts");
query.each(function(post) {
return post.destroy();
}).then(function() {
console.log("Delete job completed.");
status.success("Delete job completed.");
}, function(error) {
alert("Error: " + error.code + " " + error.message);
status.error("Error: " + error.code + " " + error.message);
});
});
I want to set up my own custom code in Parse Cloud Code.
Parse.Cloud.job("deleteUser", function(request, status) {
const query = new Parse.Query("SegmentData");
query.equalTo("userID", request.userID);
query.find()
.then(Parse.Object.destroyAll)
.catch(function(error) {
console.error("Error finding related comments " + error.code + ": " + error.message);
});
const query2 = new Parse.Query("ShowData");
query.equalTo("userID", request.userID);
query.find()
.then(Parse.Object.destroyAll)
.catch(function(error) {
console.error("Error finding related comments " + error.code + ": " + error.message);
});
});
This is the code I have written so far. I want to destroy all the records that belong to a given user, and there can even be more than 1000 of them. Will this work if there are more than 1000 records, or do I have to amend my code?
With parse-server, the default limit of each query is 100. If you want up to 1000 results per query, you need to set
query.limit(1000);
You can see here how parse-server tests and sets the limit of each query:
https://github.com/parse-community/parse-server/blob/master/spec/ParseAPI.spec.js#L314
https://github.com/parse-community/parse-server/blob/master/src/Routers/ClassesRouter.js#L29
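For reference, here is a minimal sketch of the whole job with the limit raised; request.params.userID is an assumption about how the user ID reaches the job, and the rest follows the question's code:
Parse.Cloud.job("deleteUser", function(request, status) {
    Parse.Cloud.useMasterKey();
    var userID = request.params.userID; // assumed job parameter
    var query = new Parse.Query("SegmentData");
    query.equalTo("userID", userID);
    query.limit(1000); // default is 100
    var query2 = new Parse.Query("ShowData");
    query2.equalTo("userID", userID);
    query2.limit(1000);
    Parse.Promise.when(query.find(), query2.find())
        .then(function(segmentData, showData) {
            // Destroy everything found by both queries.
            return Parse.Object.destroyAll(segmentData.concat(showData));
        })
        .then(function() {
            status.success("Delete completed.");
        }, function(error) {
            status.error("Error " + error.code + ": " + error.message);
        });
});
If a single class can hold more than 1000 matching records, switch to query.each(function(obj) { return obj.destroy(); }) as in the first answer above, since each pages through every matching object regardless of the limit.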
On an Azure custom API, I'm attempting to perform a bulk insert operation into three tables utilising database transactions. I'm facing this error on the console.
The request 'POST /api/saveinvite' has timed out. This could be caused by a script that fails to write to the response, or otherwise fails to return from an asynchronous call in a timely manner.
exports.post = function (request, response) {
console.log("save invite executed!!");
var jsonfriendcircle = request.body.jsonfriendcircle;
var jsoninviteelist = request.body.jsoninviteelist;
var jsoninviteefriendcirclelist = request.body.jsoninviteefriendcirclelist;
console.log("Circle is :" + jsonfriendcircle);
console.log("Inviteelist is :" + jsoninviteelist);
console.log("Inviteefriendcirclelist is :" + jsoninviteefriendcirclelist);
var parsedjsfrcircle = JSON.parse(jsonfriendcircle);
var mssql = request.service.mssql;
console.log("mssql obj :" + mssql);
mssql.open({
success: function (connection) {
console.log("connection to db success");
console.log("circle id: " + parsedjsfrcircle["id"]);
console.log("circle name :" + parsedjsfrcircle["circle_name"]);
var sqlst1 = 'insert into friendcircle (id,circle_name)values(?,?)';
connection.query(sqlst1, [parsedjsfrcircle["id"], parsedjsfrcircle["circle_name"]], function (err, results) {
if (err) {
console.log("Error:" + err);
connection.rollback();
response.send(statusCodes.Error, { message: '' });
connection.close();
return;
} else {
// connection.commit();
// connection.close();
}
});
}
, error: function (err) {
console.log("Unable to connect to DB :" + err);
response.send(statusCodes.Error, { message: err });
}
});
};
It doesn't look like you're trying to do too much. Inserting a few hundred rows should come back before the timeout.
You'll get a timeout if there are permission issues with the Mobile Services user on the database table, and your transaction and custom error handling may be hiding that. Ensure you've run a
GRANT SELECT, INSERT, UPDATE ON OBJECT::[dbo].[Invitee_FriendCircle] TO [Created_MobileServicesLogin_User]
--For whatever name was created when you made the app (not the name you entered when you created the app, but the wacky one that mobile services made)
Some things to try:
Ensure the tables you are inserting to are indexed for the type of insert you're doing. If it's a huge table and indexing is an issue, let the mobile API insert into a small temp table and then run an async job with sp_start_job to update the main table from the temp table. That way you are not waiting while the table does the update.
Write stored procedures in the database that contain the inserts and pass the variables into them instead of writing the INSERT query here. Inside the sproc you can also do a BULK INSERT if you really are passing a lot of values and need to do it quickly. (A script-side sketch of calling such a procedure appears near the end of this answer.)
Modify the Connection Timeout property in the connection string for your database. You can modify request.service.config.sqlConnectionString.
Try stripping it down to find the problem: remove the transaction and custom error handlers, and try just one table at a time. Try
exports.post = function(request, response) {
var jsonfriendcircle=request.body.jsonfriendcircle;
var jsoninviteelist=request.body.jsoninviteelist;
var jsoninviteefriendcirclelist=request.body.jsoninviteefriendcirclelist;
var mssql=request.service.mssql;
var sqlst1='insert into FriendCircle (id,circle_name)values(?,?)' ;
mssql.query(sqlst1,[jsonfriendcircle.id,jsonfriendcircle.circle_name], {
success: function(results) {
response.send(statusCodes.OK, { message : 'success' });
},
error: function(err) {
console.log("error is: " + err);
// Send something back so the request doesn't hang until the timeout fires.
response.send(statusCodes.INTERNAL_SERVER_ERROR, { message: err });
}
});
};
Try them one at a time to see if anything fails, and if it does, check the LOGS on the Azure portal to see if it was a permission issue.
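One way the stored-procedure suggestion above could look from the script side. This is only a sketch: the dbo.SaveFriendCircle procedure name is hypothetical (the procedure itself would hold the INSERTs or a BULK INSERT), and parsedjsfrcircle is the parsed friend-circle object from the question.
var mssql = request.service.mssql;
// Hand the values to a (hypothetical) stored procedure instead of inlining the INSERT here.
mssql.query('exec dbo.SaveFriendCircle ?, ?',
    [parsedjsfrcircle.id, parsedjsfrcircle.circle_name], {
    success: function (results) {
        response.send(statusCodes.OK, { message: 'success' });
    },
    error: function (err) {
        console.log("error is: " + err);
        response.send(statusCodes.INTERNAL_SERVER_ERROR, { message: err });
    }
});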
Good Luck!
Probably a transaction timeout after 30 seconds?
In SQL What is the default max transaction timeout
Try rewriting the insert as multiple smaller transactions.
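A rough sketch of that idea, reusing the mssql.query(sql, params, {success, error}) shape from the earlier answer. The Invitee table and its columns are assumptions, and each single-row insert runs as its own implicit transaction instead of one large one.
function insertInvitees(mssql, invitees, batchSize, done) {
    // Nothing left to insert: report success.
    if (invitees.length === 0) {
        return done(null);
    }
    var batch = invitees.slice(0, batchSize);
    var rest = invitees.slice(batchSize);
    var pending = batch.length;
    var firstError = null;
    batch.forEach(function (invitee) {
        // Assumed table and columns; adjust to the real schema.
        mssql.query('insert into Invitee (id, name) values (?, ?)',
            [invitee.id, invitee.name], {
            success: function () {
                if (--pending === 0) {
                    if (firstError) { done(firstError); }
                    else { insertInvitees(mssql, rest, batchSize, done); }
                }
            },
            error: function (err) {
                firstError = firstError || err;
                if (--pending === 0) {
                    done(firstError);
                }
            }
        });
    });
}
From the custom API script you would call something like insertInvitees(request.service.mssql, JSON.parse(jsoninviteelist), 25, function (err) { ... }) and send the response inside that callback, so the request always returns whether the batches succeed or fail.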
I'm trying to query a Parse.com database to see if a certain value is present in a field. I then want to take different actions depending on whether or not it exists in the database.
Here is the full production function:
function getProduct() {
var Products = Parse.Object.extend("Products");
var ProductEAN = 76130347394081;
var output = "";
var query = new Parse.Query(Products);
query.equalTo("EANBarcode", ProductEAN );
query.find({
success: function(results) {
var no = results[0].get("EANBarcode");
var title = results[0].get("Name");
output += "[" + no + "] " + title;
console.log( "Output is: " + output );
},
error: function(error) {
alert(error.message);
}
});
}
If the ProductEAN is present, then the success function works as intended. If it is not present, rather than the error function running, I get a console output saying the following:
"Uncaught TypeError: Cannot read property 'get' of undefined"
I am following this guide: https://www.youtube.com/watch?v=2TVmgEJfbno
I am at a bit of a loss as to why this would not work. Does anyone have any experience with this?
The error callback of the find() method gets called only when there's an error in execution. When the ProductEAN is not present, the query is still successful but returns zero matching records, which means your success callback is called with results being an empty array. Hence results[0] is undefined, which explains the console error output.
You might want to change your success callback to check results.length and decide appropriately. Something like this:
query.find({
success: function(results) {
if(results.length > 0){
var no = results[0].get("EANBarcode");
var title = results[0].get("Name");
output += "[" + no + "] " + title;
console.log( "Output is: " + output );
}
else{
console.log('No matching records');
}
},
error: function(error) {
alert(error.message);
}
});
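As a side note (this is a sketch, not part of the answer above), Parse also offers query.first(), which passes a single object to the success callback, or undefined when nothing matches, so the length check becomes a simple truthiness check:
query.first({
    success: function(product) {
        if (product) {
            output += "[" + product.get("EANBarcode") + "] " + product.get("Name");
            console.log("Output is: " + output);
        } else {
            console.log("No matching records");
        }
    },
    error: function(error) {
        alert(error.message);
    }
});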
I'm currently running the Parse cloud code background job, which involves querying all users and then running a number of functions for every user object that's returned. How do I set the query to only return the first ______ user objects, rather than all of them?
I know that if you wanted to only return the first result you'd do return usersQuery.first instead of return usersQuery.each. Is there an equivalent that only returns the first X number of results?
Parse.Cloud.job("mcBackground", function(request, status) {
// ... other code to setup usersQuery ...
Parse.Cloud.useMasterKey();
var usersQuery = new Parse.Query(Parse.User);
return usersQuery.each(function(user) {
return processUser(user)
.then(function(eBayResults) {
return mcComparison(user, eBayResults);
});
})
.then(function() {
// Set the job's success status
status.success("MatchCenterBackground completed successfully.");
}, function(error) {
// Set the job's error status
status.error("Got an error " + JSON.stringify(error));
});
});
You can't combine .limit with .each, unfortunately. I would suggest not using a background job for this at all, and instead running this logic on Heroku or another provider (even a local machine) using the parse npm module. This would allow you more flexibility and you won't need to break it into 1,000-object chunks.
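If it does have to stay in a cloud job and you only need a small, fixed number of users, here is a find()-based sketch, since limit() does apply to find(). processUser and mcComparison below are the helpers from the question, and the users are processed one at a time:
Parse.Cloud.job("mcBackground", function(request, status) {
    Parse.Cloud.useMasterKey();
    var usersQuery = new Parse.Query(Parse.User);
    usersQuery.limit(7); // fetch only the first 7 users
    usersQuery.find().then(function(users) {
        // Chain the per-user work sequentially, as each() would have done.
        var promise = Parse.Promise.as();
        users.forEach(function(user) {
            promise = promise.then(function() {
                return processUser(user).then(function(eBayResults) {
                    return mcComparison(user, eBayResults);
                });
            });
        });
        return promise;
    }).then(function() {
        status.success("MatchCenterBackground completed successfully.");
    }, function(error) {
        status.error("Got an error " + JSON.stringify(error));
    });
});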
Try using Parse's .limit() option:
Parse.Cloud.job("mcBackground", function(request, status) {
// ... other code to setup usersQuery ...
Parse.Cloud.useMasterKey();
var usersQuery = new Parse.Query(Parse.User).limit(7);
return usersQuery.each(function(user) {
return processUser(user)
.then(function(eBayResults) {
return mcComparison(user, eBayResults);
});
})
.then(function() {
// Set the job's success status
status.success("MatchCenterBackground completed successfully.");
}, function(error) {
// Set the job's error status
status.error("Got an error " + JSON.stringify(error));
});
});
I have the following job I've created to clean out unneeded records. I've simplified the code below to troubleshoot the root cause, but even with just this, over 80% of the time I run it, it fails to find anything due to Error code 1 "internal error":
Parse.Cloud.job('cleanStories', function(request, status) {
Parse.Cloud.useMasterKey();
var counter = 0;
var query = new Parse.Query('Story');
query.doesNotExist("username");
query.limit(1000);
query.find({
success: function(results) {
counter += results.length;
status.success(counter + " stories found.");
},
error: function(error) {
status.error(counter + " stories found. Error: " + error.code + " " + error.message);
}
});
});
I currently have about 568k records. This is down from almost 800k, which is when I started running this job to clean out records. It was usually running fine, but has since started erroring out very consistently. What am I doing wrong?
EDIT:
I have decreased the limit to 50 and it has a higher rate of success when executing. At 100 (the default) it still regularly fails. Is there any way I can get it back up to 1000 to get through the rest of the records faster?
This is based on a weak theory that smaller limits will help (if it really is a timeout issue, hopefully it's a per-query timeout and not a per-job timeout, otherwise this idea won't help at all). But here's an idea of how to do a large find using limit/skip:
function findN(query, results) {
// Accumulate results across repeated queries, skipping what we already have.
results = results || [];
query.skip(results.length);
return query.find().then(function(findResults) {
results = results.concat(findResults);
// Keep going until a query comes back empty.
return (findResults.length === 0) ? results : findN(query, results);
});
}
// to run this, build a query
var query = new Parse.Query("MyClass");
// qualify the query however you wish
// set any limit up to 1000, but our guess is that yours should be small
query.limit(20);
var results = [];
findN(query, results).then(function(r) {
console.log("found " + r.length + " rows");
// r will contain all of the rows matching the query, found 20 at a time
});