I am using Usergrid to store data for a customer project. It has two collections, carShowrooms and cars. So far I am good. But I have a scenario where I have to refresh the master data of the cars collection. Every time I do this, I have to delete all the existing data in cars and replace it with the incoming cars data from the master inventory system.
Now, going by the documentation at https://www.npmjs.org/package/usergrid, I see that I can only destroy one car at a time:
car.destroy(function(err) {
    if (err) {
        // error - car not deleted
        // winston log - tbd
    } else {
        // success - car deleted
    }
});
This is OK for smaller showrooms, but bigger multibrand showrooms have a variety of cars - sometimes even up to 50 different varieties (8 car brands * approx. 8 different options).
Is there a mass delete option? Can someone please point me to the documentation if I am missing something here?
P.S. I am new to Usergrid; if this is a duplicate question, please mark it as such and point me to the right URL.
If you're so inclined, I've written a Node.js bulk deleter that runs delete requests in parallel. It takes approximately 3 minutes to delete 1000 entities.
Here's an always up-to-date gist, and a copy for SO:
// Installation
// 1. Install Node.js http://nodejs.org/download/
// 2. In Terminal, cd (navigate) to the directory where you saved this file
// 3. Run 'npm install request async'
// 4. Edit the script config below with your token, org, app, and collection name.
// 5. To run the script, at the Terminal prompt, run 'node api_baas_deleter.js'
// Config
var access_token = "{token}";
var as_basepath = "http://api.usergrid.com/{org}/{app}/"; // You need the trailing slash!
var collection = "{collection_name}";
// End Config
var request = require('request');
var async = require('async');
var authstring = "access_token=" + access_token;
var total = 0;
var startTime = Date.now();
function deleteRecords(callback) {
    request.get({
        url: as_basepath + collection + "?" + authstring,
        json: true
    }, function(e, r, body) {
        if (e || !body || body.count === undefined) { // guard against transport errors as well
            var err = "Error: invalid endpoint. Check your basepath and collection name.";
            console.log(err);
            if (typeof(callback) === 'function') {
                callback(err);
            }
        } else {
            // console.log("Found " + body.count + " entities");
            if (body.count > 0) {
                var deletes = [];
                for (var i = 0; i < body.count; i++) {
                    deletes.push({
                        url: as_basepath + collection + "/" + body.entities[i].uuid + "?" + authstring,
                        json: true
                    });
                    console.log("Deleting " + body.entities[i].uuid);
                }
                async.each(deletes, function(options, callback) {
                    request.del(options, function(e, r, body) {
                        if (r.statusCode === 200) {
                            total++;
                        }
                        callback(e);
                    });
                }, function(err) {
                    setTimeout(function() {
                        deleteRecords(function(e) { // note: deleteRecords takes only a callback
                            callback(e);
                        });
                    }, 600); // Mandatory, since it seems to not retrieve entities if you make a request in < 600ms
                });
            } else {
                var timeInMinutes = minutesFromMs(Date.now() - startTime);
                console.log("Deleted " + total + " entities in " + timeInMinutes + " minute" + (timeInMinutes == 1 ? "" : "s"));
                if (typeof(callback) === 'function') {
                    callback();
                }
            }
        }
    });
}

function minutesFromMs(time) {
    return Math.round(((time % 86400000) % 3600000) / 60000).toString();
}
deleteRecords();
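If the parallel deletes run into rate limits on your Usergrid deployment, one variation (my suggestion, not part of the original gist) is to swap async.each for async.eachLimit, which caps how many DELETE requests are in flight at once:

// Drop-in replacement for the async.each call above; the limit of 10
// concurrent requests is an arbitrary starting point - tune it for your app.
async.eachLimit(deletes, 10, function(options, callback) {
    request.del(options, function(e, r, body) {
        if (r.statusCode === 200) {
            total++;
        }
        callback(e);
    });
}, function(err) {
    // same setTimeout/recursion logic as above goes here
});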
There currently isn't a mass delete function in the Usergrid Node SDK, but you can create one. This is how I added a monkey-patched delete-by-query function into the Node SDK:
// Note: this uses _.isFunction from lodash/underscore
Usergrid.client.prototype.delete = function(opts, callback) {
    if (_.isFunction(opts)) { callback = opts; opts = undefined; }
    opts = opts || {};       // guard: opts may be undefined at this point
    opts.qs = opts.qs || {}; // guard: qs may be missing
    if (!opts.qs.q) { opts.qs.q = '*'; }
    var options = {
        method: 'DELETE',
        endpoint: opts.type,
        qs: opts.qs
    };
    var self = this;
    this.request(options, function (err, data) {
        if (err && self.logging) {
            console.log('entities could not be deleted');
        }
        if (typeof(callback) === 'function') {
            callback(err, data);
        }
    });
};
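A usage sketch (the collection name and query string are placeholders; client is your existing Usergrid.client instance):

client.delete({
    type: 'cars',
    qs: { q: "color='red'" } // omit qs to delete everything (defaults to '*')
}, function(err, data) {
    if (err) { console.log('delete failed'); }
});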
Hope that helps!
Scott
I'm creating a TS3 Query Bot using the ts3-nodejs-library.
I came up with core.js, register.js and 2 configs for them that are not important here.
Core.js:
const TeamSpeak3 = require("ts3-nodejs-library")
var config = require("./configs/config.js")

//Create a new Connection
const ts3 = new TeamSpeak3({
    host: "localhost",
    queryport: 10011,
    serverport: 9987,
    username: "serveradmin",
    password: "",
    nickname: "NodeJS Query Framework"
})

/*
Ready gets fired when the Bot has connected to the TeamSpeak Query,
issued login commands (if username and password have been given),
selected the appropriate server (also if given in the config)
and set the nickname
*/
ts3.on("ready", async () => {
    Promise.all([
        ts3.registerEvent("server"),
        ts3.registerEvent("channel", 0),
        ts3.registerEvent("textserver"),
        ts3.registerEvent("textchannel"),
        ts3.registerEvent("textprivate")
    ]).then(() => {
        console.log("[ JANbot ] Listening for events: server, channel (0), textserver, textchannel, textprivate")
    }).catch(e => {
        console.log("Caught an error!")
        console.error(e)
    })
})

if(config.registerEnabled === true) { // note: use === (comparison), not = (assignment)
    var register = require("./inc/functions/register.js")
    ts3.on("clientmoved", event => {
        register.registerUser(event, event.client)
    })
}

ts3.on("error", e => console.log("Error", e.message))
ts3.on("close", e => console.log("Connection has been closed!", e))
register.js:
module.exports = function() {
    this.registerUser = function(event, client)
    {
        var register_config = require("/home/node_modules/bot/configs/register_config.js")
        if(event.channel.cid == register_config.channelId) {
            if(!client.servergroups.includes(register_config.groupId)) {
                event.client.addGroups(register_config.groupId)
                event.client.message("You have been successfully registered!")
                event.client.kickFromChannel()
                return console.log("registered " + event.client.nickname)
            } else {
                event.client.message("You are already registered!")
                event.client.kickFromChannel()
                return console.log(event.client.nickname + " is already registered")
            }
        }
    };
}
The main idea is to handle the bot and its connection in core.js and just require specific functions into it, to make updates easier (you don't need to download the whole core.js with many functions, just one file and possibly a config for it).
The error I am getting:
Error: register.registerUser is not a function
I'm not advanced in JS, so if I did anything wrong beyond my main problem, please tell me. I have some vague ideas about what's wrong but I cannot fix it. I think registerUser is not being exported as a function, but I don't have any idea how to wire register.js in here:
if(config.registerEnabled === true) {
    var register = require("./inc/functions/register.js")
    ts3.on("clientmoved", event => {
        register.registerUser(event, event.client)
    })
}
Btw. I added the returns only because I thought it would turn registerUser into a function.
//EDIT:
It works, but now I am struggling with a second problem.
Now the code:
module.exports = {
    registerUser: function(event, client)
    {
        var register_config = require("/home/node_modules/bot/configs/register_config.js")
        if(event.channel.cid == register_config.channelId) {
            var grupa = register_config.groupId
            if(!event.client.servergroups.includes(register_config.groupId)) {
                var date = new Date(),
                    g = date.getHours(),
                    m = date.getMinutes(),
                    s = date.getSeconds()
                event.client.addGroups(register_config.groupId)
                event.client.poke("You have been successfully registered!")
                event.client.kickFromChannel()
                console.log(" [ " + g + " : " + m + " : " + s + " ] " + 'Registered: ' + client.nickname)
            } else {
                event.client.poke("You are already registered!")
                event.client.kickFromChannel()
            }
        }
    }
}
the part:
if(!event.client.servergroups.includes(register_config.groupId)) {
is always true, because the client's servergroups array supposedly doesn't contain the number 11, so the script treats the check as true. I added console.log(event.client.servergroups) to see whether the problem is register_config.groupId or something else, and this shows up in the console:
[ 9, 11 ]
So after that I spotted that the problem is with register_config.groupId, because with the code below the bot behaves as described further down.
(core.js is the same as it was before the second problem appeared)
register.js:
(same code as the register.js shown above)
register_config.js:
module.exports = {
    channelId: 4,
    groupdId: 11
}
The bot behaves like this:
client is on a random channel (not with cid 4)
client joins the channel with cid 4
bot kicks him,
pokes him,
doesn't give him the server group
Conclusion: register_config.groupId somehow isn't defined as the number eleven.
//EDIT #2:
I've done this:
var register_config = {
    "groupId": 11,
    "channelId": 4
};
module.exports = register_config;
And it works :) Never mind, and thank you for the help! (In hindsight, the original config exported groupdId - note the typo - instead of groupId, so register_config.groupId was undefined and the includes() check could never match.)
In register.js export an object instead of a function:
// module.exports = function() {
module.exports = {
    // this.registerUser = function(event, client)
    registerUser: function(event, client)
    {
        var register_config = require("/home/node_modules/bot/configs/register_config.js")
        if(event.channel.cid == register_config.channelId) {
            if(!client.servergroups.includes(register_config.groupId)) {
                event.client.addGroups(register_config.groupId) // note: was config.groupId, which is undefined in this file
                event.client.message("You have been successfully registered!")
                event.client.kickFromChannel()
                return console.log("registered " + event.client.nickname)
            } else {
                event.client.message("You are already registered!")
                event.client.kickFromChannel()
                return console.log(event.client.nickname + " is already registered")
            }
        }
    }
}
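For what it's worth, the original function-style export could also be made to work by instantiating it, though exporting a plain object as above is cleaner. A sketch:

// Alternative (kept only for illustration): constructor-style usage of the
// original `module.exports = function() { this.registerUser = ... }` export.
var Register = require("./inc/functions/register.js")
var register = new Register() // `new` binds `this`, so registerUser gets attached

ts3.on("clientmoved", event => {
    register.registerUser(event, event.client)
})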
I am trying to build a file of JSON data from repeated calls to a REST API. The final file to be written is the sum of the data received from all the calls. At present the file is being written with the contents of the first call, then overwritten by the contents of the first + second call (see console output below the code).
As I have to make many calls, once the code is working I would like to write the file only once the requests have finished and the JSON string has been built. Does anyone know how I would go about doing this? Maybe with a callback(?), which I still don't have the hang of, once the requests have finished or the JSON string has finished being built.
"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');
const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";
var options = {
    url: '',
    method: 'GET',
    accept: "application/json",
    json: true,
};

var csvData = [];
var jsonData = "[";

fs.createReadStream(path)
    .pipe(parse({delimiter: ','}))
    .on('data', function(data) {
        csvData.push(data[1]);
    })
    .on('end', function() {
        var start = Date.now();
        var records = csvData.length; // 2212 objects
        console.log(records);
        var dataLength = 2; // set low at the moment
        for (var i = 0; i < dataLength; i += 1) {
            var url = shapes + csvData[i];
            options.url = url; // set url query
            request(options, function(error, response, body) {
                var time = Date.now() - start;
                var s = JSON.stringify(body.response);
                console.log('\n' + (Buffer.byteLength(s)/1000).toFixed(2) +
                    " kilobytes downloaded in: " + (time/1000) + " sec");
                console.log(i);
                buildJSON(s);
            });
        }

        function buildJSON(s) {
            var newStr = s.substring(1, s.length - 1);
            jsonData += newStr + ',';
            writeFile(jsonData);
        }

        function writeFile(jsonData) {
            fs.writeFile(pathJSON, jsonData, function(err) {
                if (err) {
                    return console.log(err);
                } else {
                    console.log("file complete");
                }
            });
        }
    });
128.13 kilobytes downloaded in: 2.796 sec
2
file complete
256.21 kilobytes downloaded in: 3.167 sec
2
file complete
Perhaps writing to the file after all requests are complete will help. In the current code, the writeFile function is called each time a request completes (which overwrites the file each time).
A quick way to fix this is to count completed requests (and failures) and write to the file only after all the requests are done.
"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');
const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";
var options = {
    url: '',
    method: 'GET',
    accept: "application/json",
    json: true,
};

var csvData = [];
var jsonData = "[";

fs.createReadStream(path)
    .pipe(parse({
        delimiter: ','
    }))
    .on('data', function (data) {
        csvData.push(data[1]);
    })
    .on('end', function () {
        var start = Date.now();
        var records = csvData.length; // 2212 objects
        console.log(records);
        var dataLength = 2; // set low at the moment

        var jsonsDownloaded = 0; // Counter to track complete JSON requests
        var jsonsFailed = 0;     // Counter to handle failed JSON requests

        for (var i = 0; i < dataLength; i += 1) {
            var url = shapes + csvData[i];
            options.url = url; // set url query
            request(options, function (error, response, body) {
                if (error) {
                    jsonsFailed++;
                    writeFile(jsonData);
                    return;
                }
                jsonsDownloaded++;
                var time = Date.now() - start;
                var s = JSON.stringify(body.response);
                console.log('\n' + (Buffer.byteLength(s) / 1000).toFixed(2) +
                    " kilobytes downloaded in: " + (time / 1000) + " sec");
                console.log(i);
                buildJSON(s);
            });
        }

        function buildJSON(s) {
            var newStr = s.substring(1, s.length - 1);
            jsonData += newStr + ',';
            writeFile(jsonData);
        }

        function writeFile(jsonData) {
            // Only write once every request has either succeeded or failed
            if (dataLength - (jsonsDownloaded + jsonsFailed) > 0) {
                return;
            }
            fs.writeFile(pathJSON, jsonData, function (err) {
                if (err) {
                    return console.log(err);
                } else {
                    console.log("file complete");
                }
            });
        }
    });
Note:
Firing requests in quick succession (say, 2000 requests in a for loop) does not work well in my experience; try batching them. Also, doing it this way does not guarantee order (if that is important in your use case).
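If order does matter, one approach (a sketch reusing the variable names above, untested) is to store each response at its loop index and assemble the output once at the end:

var results = new Array(dataLength);
var finished = 0;

for (var i = 0; i < dataLength; i += 1) {
    (function (idx) { // capture the loop index per request
        var opts = Object.assign({}, options, { url: shapes + csvData[idx] });
        request(opts, function (error, response, body) {
            results[idx] = error ? null : JSON.stringify(body.response);
            finished += 1;
            if (finished === dataLength) {
                // drop failed slots, join in CSV order, and write once
                var out = "[" + results.filter(Boolean).join(",") + "]";
                fs.writeFile(pathJSON, out, function (err) {
                    if (err) return console.log(err);
                    console.log("file complete");
                });
            }
        });
    })(i);
}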
An alternative would be to open your file in append mode. You can do this by passing an extra options object, with flag set to 'a', to your fs.writeFile call. (If you go this route, append only the newly built chunk rather than the accumulated jsonData string, or the file will contain duplicates.)
fs.writeFile(pathJSON, jsonData, {
    flag: 'a'
}, function (err) {
    if (err) {
        return console.log(err);
    }
});
References:
fs.writeFile Docs
File system flags
I am porting an old Ruby script over to JavaScript, setting the function up as a cron instance so it will run on schedule. The function queries our MySQL database, retrieves inventory information for our products, and then sends requests to a trading partner's API to update our inventory on their site.
Due to Node's asynchronicity I am running into issues. We need to chunk requests into 1000 items per request, and we are sending 10k products. The issue is that each request just sends the last 1000 items each time: the for loop inside the while loop moves forward before it finishes crafting the JSON request body. I tried creating anonymous setTimeout functions in the while loop to handle it, as well as creating an object holding the request function and the variables to be passed and stuffing it into an array to iterate over once the while loop completes, but I get the same result. I'm not sure of the best way to handle it so that each request gets the correct batch of items. I also need to wait 3 minutes between each request of 1000 items to avoid hitting the request cap.
query.on('end', () => {
    connection.release();
    writeArray = itemArray.slice(0),
    alteredArray = [];
    var csv = json2csv({data: writeArray, fields: fields}),
        timestamp = new Date(Date.now());
    timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
    let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';
    while(itemArray.length > 0){
        alteredArray = itemArray.splice(0, 999);
        for(let i = 0; i < alteredArray.length; i++){
            jsonObjectArray.push({
                sku: alteredArray[i]['sku'],
                quantity: alteredArray[i]["quantity"],
                overstockquantity: alteredArray[i]["osInv"],
                warehouse: warehouse,
                isdiscontinued: alteredArray[i]["disc"],
                backorderdate: alteredArray[i]["etd"],
                backorderavailability: alteredArray[i]["boq"]
            });
        }
        var jsonObject = {
            login: user,
            password: password,
            items: jsonObjectArray
        };
        postOptions.url = endpoint;
        postOptions.body = JSON.stringify(jsonObject);
        funcArray.push({
            func: function(postOptions){
                request(postOptions, (err, res, body) => {
                    if(err){ console.error(err); throw err; }
                    console.log(body);
                });
            },
            vars: postOptions
        });
        jsonObjectArray.length = 0;
    }
    var mili = 180000;
    for(let i = 0; i < funcArray.length; i++){
        setTimeout(() => {
            var d = JSON.parse(funcArray[i]['vars'].body);
            console.log(d);
            console.log('request ' + i);
            //funcArray[i]['func'](funcArray[i]['vars']);
        }, mili * i);
    }
});
You would need async/await or Promises to handle async actions in Node.js.
I am not sure whether your Node version supports async/await, so I have tried a Promise-based solution:
query.on('end', () => {
    connection.release();
    writeArray = itemArray.slice(0),
    alteredArray = [];
    var csv = json2csv({ data: writeArray, fields: fields }),
        timestamp = new Date(Date.now());
    timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
    let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';

    // Build one postOptions object per batch of 1000 items
    var calls = chunk(itemArray, 1000)
        .map(function(chunk) {
            var renameditemsArray = chunk.map((item) => new renamedItem(item, warehouse));
            var postOptions = {};
            postOptions.url = endpoint;
            postOptions.body = JSON.stringify({
                login: user,
                password: password,
                items: renameditemsArray
            });
            return postOptions;
        });

    sequenceBatch(calls, makeRequest)
        .then(function() {
            console.log('done');
        })
        .catch(function(err) {
            console.log('failed', err);
        });

    // Chains one promise per batch; each link waits 3 minutes, then fires the request
    function sequenceBatch(calls, cb) {
        var sequence = Promise.resolve();
        var count = 1;
        calls.forEach(function (callOptions) {
            var current = count++; // capture the batch number for the log message
            sequence = sequence.then(() => {
                return new Promise(function (resolve, reject) {
                    setTimeout(function () {
                        try {
                            cb(callOptions);
                            resolve(`callsequence${current} done`);
                        }
                        catch (err) {
                            reject(`callsequence ${current} failed`);
                        }
                    }, 180000);
                });
            });
        });
        return sequence;
    }

    function makeRequest(postOptions) {
        request(postOptions, (err, res, body) => {
            if (err) {
                console.error(err);
                throw err;
            }
            console.log(body);
        });
    }

    function chunk(arr, len) {
        var chunks = [],
            i = 0,
            n = arr.length;
        while (i < n) {
            chunks.push(arr.slice(i, i += len));
        }
        return chunks;
    }

    function renamedItem(item, warehouse) {
        this.sku = item['sku'];
        this.quantity = item["quantity"];
        this.overstockquantity = item["osInv"];
        this.warehouse = warehouse;
        this.isdiscontinued = item["disc"];
        this.backorderdate = item["etd"];
        this.backorderavailability = item["boq"];
    }
});
Could you please try this snippet and let me know if it works? I couldn't test it since I made it up on the fly. The core logic is in the sequenceBatch function. The answer is based on another question which explains how timeouts and promises work together.
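For reference, if your Node version is 7.6 or newer, the same sequencing could be written more directly with async/await (a sketch, untested; sleep is a small helper, not a built-in):

// Hypothetical async/await equivalent of sequenceBatch
function sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}

async function sendBatches(calls) {
    for (const callOptions of calls) {
        await sleep(180000);   // wait 3 minutes before each batch
        makeRequest(callOptions);
    }
}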
Turns out this wasn't a closure or async issue at all: the request objects I was building held references to the same object instead of shallow copies, so every entry in the final array ended up linked to the same object.
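To illustrate the pitfall (my own minimal reconstruction, not the original code): pushing the same object into an array and then mutating it leaves every element pointing at its final state. A shallow copy per iteration avoids this:

var postOptions = {};
var funcArray = [];
for (var batch = 0; batch < 3; batch++) {
    postOptions.body = "batch " + batch;
    funcArray.push({ vars: postOptions });      // same reference every time
}
console.log(funcArray.map(f => f.vars.body));   // [ 'batch 2', 'batch 2', 'batch 2' ]

// Fix: copy the object so each entry keeps its own body
// funcArray.push({ vars: Object.assign({}, postOptions) });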
Here is the line (50) where this is happening:
var meetingId = meeting._id.toString(),
And here is the full, relevant code:
var MongoClient = require('mongodb').MongoClient;
var assert = require('assert');
var ObjectId = require('mongodb').ObjectID;
var config = require('./config'),
    xlsx = require('./xlsx'),
    utils = require('./utils'),
    _ = require('lodash'),
    url = config.DB_URL;

var meetings = [];

function findNumberOfNotesByMeeting(db, meeting, callback) {
    var meetingId = meeting._id.toString(),
        meetingName = meeting.name.displayValue,
        attendees = meeting.attendees;
        host = meeting.host;
    var count = 1, pending = 0, accepted = 0;
    console.log("==== Meeting: " + meetingName + '====');
    _.each(attendees, function(item) {
        console.log(count++ + ': ' + item.email + ' (' + item.invitationStatus + ')');
        if (item.invitationStatus == 'pending') { pending++; }
        else if (item.invitationStatus == 'accepted') { accepted++; }
    });
    console.log("*** " + attendees.length + ", " + pending + "," + accepted);
    db.collection('users').findOne({'_id': new ObjectId(host)}, function(err, doc) {
        var emails = [];
        if (doc.emails) {
            doc.emails.forEach(function(e) {
                emails.push(e.email + (e.primary ? '(P)' : ''));
            });
        }
        var email = emails.join(', ');
        if (utils.toSkipEmail(email)) {
            callback();
        } else {
            db.collection('notes').find({ 'meetingId': meetingId }).count(function(err, count) {
                if (count != 0) {
                    console.log(meetingName + ': ' + count + ',' + attendees.length + ' (' + email + ')');
                    meetings.push([ meetingName, count, email, attendees.length, pending, accepted ]);
                }
                callback();
            });
        }
    });
}

function findMeetings(db, meeting, callback) {
    var meetingId = meeting._id.toString(),
        host = meeting.host;
    db.collection('users').findOne({'_id': new ObjectId(host)}, function(err, doc) {
        var emails = [];
        if (!err && doc && doc.emails) {
            doc.emails.forEach(function(e) {
                emails.push(e.email + (e.primary ? '(P)' : ''));
            });
        }
        var email = emails.join(', ');
        if (utils.toSkipEmail(email)) {
            callback();
        } else {
            db.collection('notes').find({ 'meetingId': meetingId }).count(function(err, count) {
                if (count != 0) {
                    var cursor = db.collection('meetings').find({
                        'email': {'$regex': 'agu', '$options': 'i' }
                    });
                }
                callback();
            });
        }
        cursor.count(function(err, count) {
            console.log('count: ' + count);
            var cnt = 0;
            cursor.each(function(err, doc) {
                assert.equal(err, null);
                if (doc != null) {
                    findNumberOfNotesByMeeting(db, doc, function() {
                        cnt++;
                        if (cnt >= count) { callback(); }
                    });
                }
            });
        });
    });
}

MongoClient.connect(url, function(err, db) {
    assert.equal(null, err);
    findMeetings(db, function() {
        var newMeetings = meetings.sort(function(m1, m2) { return m2[1] - m1[1]; });
        newMeetings.splice(0, 0, [ 'Meeting Name', 'Number of Notes', 'Emails' ]);
        xlsx.writeXLSX(newMeetings, config.xlsxFileNameMeetings);
        db.close();
    });
});
As you can see, the meeting variable (which I am almost 100% sure is the problem, not the _id property) is passed in just fine as a parameter to the earlier function findNumberOfNotesByMeeting. I have found some information here on SO suggesting that my new function may be asynchronous and need a callback, but I've attempted that and can't get it to work, and I'm not even sure it's the right fix for my code.
You're not passing a meeting object to findMeetings, which expects it as the second parameter. Instead of a meeting object, the function receives the callback function in its place, so meeting._id is undefined.
In fact, what is the purpose of the findMeetings function? Its name suggests it can either find all meetings in the database or all meetings with a specific id. You're calling it without a meeting, which suggests you might be trying to find all meetings, but its implementation takes a meeting object. You need to clear that up first.
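One way to resolve the mismatch (a sketch only, reusing the cursor logic already in the posted code) is to drop the meeting parameter and have findMeetings fetch every meeting itself:

// Hypothetical cleanup: findMeetings takes only (db, callback) and delegates
// the per-meeting work to findNumberOfNotesByMeeting. The email-regex filter
// from the original query is omitted here; re-add it in find() if needed.
function findMeetings(db, callback) {
    var cursor = db.collection('meetings').find({});
    cursor.count(function(err, count) {
        var cnt = 0;
        cursor.each(function(err, doc) {
            assert.equal(err, null);
            if (doc != null) {
                findNumberOfNotesByMeeting(db, doc, function() {
                    cnt++;
                    if (cnt >= count) { callback(); }
                });
            }
        });
    });
}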
I need to batch change a number of image links (URLs that exist within a class) to image files (that Parse.com hosts).
Cloud code is (apparently) how to do it.
I've followed the documentation here but haven't had any success.
What I wanted to do is:
Take URL link from "COLUMN_1"
Make it a file
Upload the file to "COLUMN_1" (overwriting the existing URL). If this is dangerous, I can upload it to a new column ("COLUMN_2") instead.
Repeat for next row
This code did not work (this is my first time with JS):
imgFile.save().then(function () {
    object.set("COLUMN_1", imgFile);
    return object.save();
}).then(function (CLASSNAME) {
    response.success("saved object");
}, function (error) {
    response.error("failed to save object");
});
Can anyone recommend how to do this?
OK - this successfully works, for anyone else trying:
Parse.Cloud.job("convertFiles", function(request, status) {
    // Busy-wait helper used to throttle saves
    function sleep(milliseconds) {
        var start = new Date().getTime();
        for (var i = 0; i < 1e7; i++) {
            if ((new Date().getTime() - start) > milliseconds) {
                break;
            }
        }
    }

    // Tell the JS cloud code to keep a log of where it's up to. Manually create
    // one row (in class "debugclass") to get an object Id.
    Parse.Cloud.useMasterKey();
    var Debug = Parse.Object.extend("debugclass");
    var queryForDebugObj = new Parse.Query(Debug);
    queryForDebugObj.equalTo("objectId", "KbwwDV2S57");

    // Query for all users
    // var queryForSublist = new Parse.Query(Parse.Object.extend("gentest"));
    queryForDebugObj.find({
        success: function(results) {
            var debugObj = results[0];
            var processCallback = function(res) {
                if (res.length === 0) { // guard: the final page may come back empty
                    status.success("done");
                    return;
                }
                var entry = res[0];
                debugObj.set("LastObject", entry.id);
                debugObj.save();
                Parse.Cloud.httpRequest({
                    url: entry.get("smallImage2"),
                    method: "GET",
                    success: function(httpImgFile) {
                        console.log("httpImgFile: " + String(httpImgFile.buffer));
                        var imgFile = new Parse.File("picture.jpg", {base64: httpImgFile.buffer.toString('base64')});
                        imgFile.save().then(function () {
                            console.log("2");
                            entry.set("smallImage1", imgFile);
                            entry.save(null, {
                                success: function(unused) {
                                    debugObj.increment("itemDone");
                                    sleep(20);
                                    res.shift();
                                    if (res.length === 0) {
                                        process(entry.id); // page done; fetch the next page
                                        return;
                                    } else {
                                        processCallback(res);
                                    }
                                },
                                error: function(unused, error) {
                                    status.error("failed to save entry"); // note: was response.error, but jobs report via status
                                }
                            });
                        });
                    },
                    error: function(httpResponse) {
                        console.log("unsuccessful http request");
                        status.error("unsuccessful http request"); // note: was response.error(responseString), both undefined here
                    }
                });
            };

            var process = function(skip) {
                var queryForSublist = new Parse.Query("genpants");
                if (skip) {
                    queryForSublist.greaterThan("objectId", skip);
                    console.error("last object retrieved:" + skip);
                }
                queryForSublist.ascending("objectId");
                queryForSublist.find().then(function querySuccess(res) {
                    processCallback(res);
                }, function queryFailed(reason) {
                    status.error("query unsuccessful: " + reason.code + " " + reason.message);
                });
            };

            process(debugObj.get("LastObject"));
        },
        error: function(error) {
            status.error("xxx Uh oh, something went wrong 2: " + error + " " + error.message);
        }
    });
});
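One note for anyone reusing this: classic Parse queries return 100 rows per find() by default (1000 max), which is why the job pages through by objectId. If you want larger pages per pass, you can raise the limit explicitly inside process (standard Parse.Query API):

// Fetch up to 1000 rows per pass instead of the default 100
queryForSublist.limit(1000);
queryForSublist.ascending("objectId");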