Fetching relation to PFObject cloud code - javascript

I have a PFUser that has a days relation to a Day PFObject.
In my database it looks like this:
{
    "_id": "WjLAnMuRmp",
    "name": "c",
    "_created_at": {
        "$date": "2016-08-04T15:28:51.759Z"
    },
    "_updated_at": {
        "$date": "2016-08-24T19:44:44.774Z"
    },
    "days": [
        {
            "__type": "Pointer",
            "className": "Day",
            "objectId": "BrQwmKAbJC"
        },
        {
            "__type": "Pointer",
            "className": "Day",
            "objectId": "6wuDMl4kKI"
        }
    ]
}
Pretty straightforward. In my Cloud Code, I'm sending up a PFUser objectId, then trying to fetch all the days that user has and iterate over them. For some strange reason I keep getting 0 results back when I do the relation query.
Here is what I'm working with:
Parse.Cloud.define("getDayAveragesForUser", function(request, response) {
    console.log("-getDayAveragesForUser");
    // Create the query on the User class
    var fetchedUser = new Parse.User({id:request.params.userObjectId});
    console.log("the fetched user: " + fetchedUser.id);
    var relation = fetchedUser.relation("days");
    var query = relation.query();
    query.limit(365);
    query.ascending("createdAt");
    query.find({
        success: function(results) {
            console.log("Successfully retrieved " + results.length + " Days.");
        },
        error: function(error) {
            console.log("Error: " + error.code + " " + error.message);
        }
    });
});
When I print fetchedUser.id it's correct, so I know I'm on the right user. This follows the documentation example:
var relation = user.relation("likes");
relation.query().find({
    success: function(list) {
        // list contains the posts that the current user likes.
    }
});
This should be working fine.
===
I just added this to my Cloud Code to test:
var days = fetchedUser.get("days");
console.log("type of: " + typeof days);
which gives me:
type of: undefined

The reason is that your days are not saved as a relation but as an array of pointers, and in Parse, relations and pointers are handled in different ways.
In order to fetch the days pointers you need to change your query to look like the following:
var userQuery = new Parse.Query(Parse.User);
userQuery.include("days"); // include the days pointers in the results
userQuery.get(request.params.userObjectId, {
    success: function(user) {
        // user now has the full Day objects in its "days" array
        console.log(user.get("days")); // print the days to console
    },
    error: function(error) {
        console.log("Error fetching user: " + error.message);
    }
});
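If you then need to iterate over the days inside the Cloud function, a short follow-up sketch could look like the one below. The numeric "average" field on Day is an assumption for illustration only; it does not appear in the question.
Parse.Cloud.define("getDayAveragesForUser", function(request, response) {
    var userQuery = new Parse.Query(Parse.User);
    userQuery.include("days"); // pull in the full Day objects, not just pointers
    userQuery.get(request.params.userObjectId, {
        success: function(user) {
            var days = user.get("days") || [];
            var total = 0;
            days.forEach(function(day) {
                total += day.get("average") || 0; // "average" is a hypothetical field
            });
            response.success({ dayCount: days.length, total: total });
        },
        error: function(error) {
            response.error(error);
        }
    });
});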

Related

Youtube API Playlist - List playlist stopped working

I have a strange issue with a system that creates a YouTube playlist and then fills the playlist with videos from a database table.
The code is version controlled with Git, there have been no commits for 3-4 months, and it worked until a couple of days ago.
Suddenly it can't find the items, and therefore doesn't find the id of the item it should find. The system creates a playlist, then finds the latest created playlist (which is the one just created), and then fills that playlist up normally.
I'm not very good at JavaScript, to be honest; are there any good JavaScript developers out there who can solve this? The error seems pretty common when googling, but in combination with the YouTube API use I find it hard to figure out why it suddenly doesn't return items. (If I run a GET in Postman, I get the correct playlist, so it should be something in the code that isn't working 100%.)
function createPlaylist() {
var client = document.getElementsByName("client")[0].value;
var description = document.getElementsByName("information")[0].value;
return gapi.client.youtube.playlists.insert({
"part": [
"snippet,status"
],
"resource": {
"snippet": {
"title": client,
"description": description,
"position": 1,
"resourceId": {
"kind": "youtube#video",
"videoId": "mhmGwTDpPf0"
},
"tags": [
"postural workout"
],
"defaultLanguage": "en"
},
"status": {
"privacyStatus": "public"
}
}
})
.then(function(response) {
return gapi.client.youtube.playlists.list({
"part": [
"id,snippet,contentDetails"
],
"maxResults": 1,
"mine": true
})
.then(function(response) {
console.log("Response", response);
MyVars.latestPlaylistID = response.result.items[0].id;
pID = MyVars.latestPlaylistID
console.log(pID + " is the latest playlist");
var doms = document.getElementsByTagName("tr");
// Get every TR into an array
var array = $('tbody > tr').map(function() {
return $.map($(this).data(), function(v) {
return v;
});
}).get();
//array.reverse();
var array = array.filter(function(element){ return element.length>=11});
videosIdArray = array.reverse();
console.log(pID, videosIdArray, 0);
addVideoToPlayList(pID, videosIdArray, 0);
// setTimeout(addVideoToPlayList(pID, videosIdArray, 0), 5000);
document.getElementById("playlistID").innerHTML = 'https://www.youtube.com/playlist?list=' + pID;
document.getElementById("playlistID").href = 'https://www.youtube.com/playlist?list=' + pID;
},
function(err) { console.error("ListPlaylist error", err); });
},
function(err) { console.error("InsertPlaylist error", err); });
}
This is what happens now (error screenshot):
As you can see, the items array is empty.
I have now solved it!
By breaking the function down into smaller functions it now gives me the correct result. Why the problem suddenly occurred is still unknown, but I'm glad it works now.
This is the final solution, in case others out there are trying to solve a similar issue.
function createPlaylist() {
var client = document.getElementsByName("client")[0].value;
var description = document.getElementsByName("information")[0].value;
return gapi.client.youtube.playlists.insert({
"part": [
"snippet,status"
],
"resource": {
"snippet": {
"title": client,
"description": description,
"position": 1
},
"status": {
"privacyStatus": "public"
}
}
}).then(function(response) {
console.log("Response", response);
});
}
function addToPlaylist() {
return gapi.client.youtube.playlists.list({
"part": [
"id,snippet,contentDetails"
],
"maxResults": 1,
"mine": true
})
.then(function(response) {
console.log("Response", response);
MyVars.latestPlaylistID = response.result.items[0].id;
pID = MyVars.latestPlaylistID
console.log(pID + " is the latest playlist");
var doms = document.getElementsByTagName("tr");
var array = $('tbody > tr').map(function() {
return $.map($(this).data(), function(v) {
return v;
});
}).get();
var array = array.filter(function(element){ return element.length>=11});
videosIdArray = array.reverse();
console.log(pID, videosIdArray, 0);
addVideoToPlayList(pID, videosIdArray, 0);
},
function(err) { console.error("ListPlaylist error", err); });
}
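For what it's worth, a shorter variant (my own sketch, not the poster's code) reads the new playlist ID straight from the insert response instead of listing playlists afterwards, since playlists.insert returns the created playlist resource:
function createPlaylistAndGetId() {
    // Hypothetical helper name; the inputs are the same fields used above.
    var client = document.getElementsByName("client")[0].value;
    var description = document.getElementsByName("information")[0].value;
    return gapi.client.youtube.playlists.insert({
        "part": ["snippet,status"],
        "resource": {
            "snippet": { "title": client, "description": description },
            "status": { "privacyStatus": "public" }
        }
    }).then(function(response) {
        // The insert response already contains the created playlist,
        // so there is no race against playlists.list.
        var pID = response.result.id;
        console.log(pID + " is the new playlist");
        return pID;
    }, function(err) {
        console.error("InsertPlaylist error", err);
    });
}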

Nodejs Mongoose bulkWrite with condition check before upsert, and missing default fields

Problem: How to bulk insert a million records while making a condition check on a field.
Situation: I have to run a standalone Node script multiple times, and each run gets JSON data that needs to be inserted into MongoDB. This JSON data can contain the same data that was inserted earlier, or it can have a change in one field (count) that needs to be updated.
The 'count' field can be greater than, equal to, or less than the existing entry.
Requirement: I need to update a document (row) only if the incoming count is greater than the existing one.
I could do this by fetching the existing records, looping over every record, and doing the condition check, but that is not what I'm looking for, as there will be millions of records to insert.
Question 1: Is there a way in Mongoose bulkWrite to make such a condition check before the upsert?
Question 2: If bulkWrite is not what I should be using, what is the alternative solution with better performance and less overhead on application memory?
MongoDB server Version: 3.4.10
The sample code below inserts 2 records and, after some delay, tries to insert another record whose only change is a count that is lower than the existing entry in the DB.
In this situation, my requirement is that the second record's count value should not be updated.
Question 3: Why are the default value fields not set in the DB?
Question 4: When dealing with a huge set of data, is there an event to know when all the records have been written to disk? Since the DB save is an async call, when should I trigger database.close and process.exit()? The current code simply waits for 5 seconds, which is the wrong way to implement this.
If I don't close the DB the application won't exit, and if I exit the application after some timeout it might exit before all the data is written to disk.
/**
* Get DB Connection
*/
const mongoose = require("mongoose");
mongoose.Promise = global.Promise;
const url = "mongodb://127.0.0.1:27017/sample";
let DATABASE = {};
DATABASE.connect = () => {
let db = mongoose.createConnection(url, { autoIndex: false, connectTimeoutMS: 30000, reconnectTries: 30, reconnectInterval: 2000 });
console.log("Database connecting to URL ", url);
db.on('error', console.error.bind(console, 'connection error:'));
return db;
}
DATABASE.close = (db) => {
if (db) {
db.close(function () {
console.log('Mongoose default connection with DB disconnected ');
});
}
}
/**
* Now define schema (will be part of module file)
*/
const Schema = mongoose.Schema;
var apiDetailSchema = new Schema({
ip: String,
instance: Number,
component: String,
logDate: Number,
count: Number,
apiMethod: {type:String, default:'DEFAULT METHOD', required: true},
api: String,
status: String,
httpMethod: String,
environment: String,
datacenter: {type:String, default:'INDIA', required:true},
});
apiDetailSchema.index({ ip: 1, instance: 1, component: 1, logDate: 1, api: 1, status: 1, httpMethod:1, environment:1, datacenter:1}, { unique: true });
const API_DETAIL = {};
API_DETAIL.connect = () => {
if (API_DETAIL.db) {
console.log("Returning existing DB for API Schema");
return API_DETAIL.db;
}
console.log("Requesting New DB connection for API Schema");
API_DETAIL.db = DATABASE.connect();
return API_DETAIL.db;
}
API_DETAIL.close = () => {
if (API_DETAIL.db) DATABASE.close(API_DETAIL.db);
}
API_DETAIL.connect();
API_DETAIL.SCHEMA = API_DETAIL.db.model('apiDetail', apiDetailSchema);
/**
* Use of API_DETAIL to insert data
*/
var bulkUpdateApiData = (data) => {
let total = data.length;
return new Promise((resolve, reject) => {
if (total === 0) {
resolve("NO DATA to update API details");
}
console.log("Bulkupdating "+total+" API records");
let db = API_DETAIL.connect(); // Connect to DB
if (!db) {
console.log("Failed to obtain DB connection during API Bulk update");
reject("ERROR: DB Connection failed");
}
let bulkOps = [];
console.log("Going to Bulk update "+total+" API details");
data.forEach(d => {
let { ip, instance, component, logDate, count, api, status, httpMethod, environment, datacenter } = d;
let upsertDoc = {
'updateOne': {
// Filter applied to all field except count, so that it will update count
// TODO: Check if the count is more, then only update
'filter': { ip, instance, component, logDate, api, status, httpMethod, environment, datacenter },
'update': d,
'upsert': true
}
}
bulkOps.push(upsertDoc);
});
API_DETAIL.SCHEMA.bulkWrite(bulkOps).then(BulkWriteOpResultObject => {
console.log(total + " API Details updated to DB");
// console.log(JSON.stringify(BulkWriteOpResultObject, null, 2));
resolve("Updated "+total+ " API Details");
}).catch(e => {
console.log("ERROR upserting addIpDetail", e);
reject(e);
});
});
} // Function : bulkUpdateApiData
let initialData = [
{
"ip": "192.168.1.2",
"instance": 2,
"component": "NODE",
"logDate": "20180114",
"api": "/services/srest/abc/authenticator/login",
"status": "200",
"httpMethod": "POST",
"environment": "production",
"count": 8
},
{
"ip": "192.168.1.2",
"instance": 2,
"component": "NODE",
"logDate": "20180114",
"api": "/services/srest/abc/authenticator/logout",
"status": "204",
"httpMethod": "POST",
"environment": "production",
"count": 8888 // Initially it was more
}];
bulkUpdateApiData(initialData).then(output => {
console.log(output);
}).catch(e => {
console.log("Something went wrong during API Detail bulk update", e);
});
let newData = [
{
"ip": "192.168.1.2",
"instance": 2,
"component": "NODE",
"logDate": "20180114",
"api": "/services/srest/abc/authenticator/logout",
"status": "204",
"httpMethod": "POST",
"environment": "production",
"count": 10 // Now it is lesser than initial one
}];
// Wait for 2 seconds to complete previous write operation,
// if not below bulkWrite will complete first !!!
setTimeout(() => {
console.log("=================================================");
console.log("Bulk updating EXISTING data with lesser count");
bulkUpdateApiData(newData).then(output => {
console.log(output);
}).catch(e => {
console.log("Something went wrong during API Detail bulk update", e);
});
}, 2000);
console.log("-----------------------------------------------");
// As DB write / save is async operation, When should I call this CLOSE connection?
// Is there a way to know when exactly DB write is completed?
setTimeout(API_DETAIL.close, 5000);
Output:
> node bulkWrite.js
Requesting New DB connection for API Schema
Database connecting to URL mongodb://127.0.0.1:27017/sample
Bulkupdating 2 API records
Returning existing DB for API Schema
Going to Bulk update 2 API details
-----------------------------------------------
2 API Details updated to DB
Updated 2 API Details
=================================================
Bulk updating EXISTING data with lesser count
Bulkupdating 1 API records
Returning existing DB for API Schema
Going to Bulk update 1 API details
1 API Details updated to DB
Updated 1 API Details
Mongoose default connection with DB disconnected
DB output, where the second record/document has the updated value in the 'count' field:
> db.apidetails.find().pretty()
{
"_id" : ObjectId("5a5df2d1952021f65578fc8f"),
"api" : "/services/srest/abc/authenticator/login",
"component" : "NODE",
"count" : 8,
"datacenter" : null,
"environment" : "production",
"httpMethod" : "POST",
"instance" : 2,
"ip" : "192.168.1.2",
"logDate" : 20180114,
"status" : "200"
}
{
"_id" : ObjectId("5a5df2d1952021f65578fc90"),
"api" : "/services/srest/abc/authenticator/logout",
"component" : "NODE",
"count" : 10,
"datacenter" : null,
"environment" : "production",
"httpMethod" : "POST",
"instance" : 2,
"ip" : "192.168.1.2",
"logDate" : 20180114,
"status" : "204"
}
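One possible direction for Question 1 and Question 3, as a sketch rather than a tested solution: MongoDB's $max update operator only writes a field when the incoming value is greater than the stored one, and $setOnInsert can supply values that should only be written when a new document is created (for example the schema defaults, which bulkWrite does not appear to apply on its own). A hypothetical rework of upsertDoc inside bulkUpdateApiData:
let upsertDoc = {
    'updateOne': {
        'filter': { ip, instance, component, logDate, api, status, httpMethod, environment, datacenter },
        'update': {
            '$max': { count: count },                       // update count only if the new value is greater
            '$setOnInsert': { apiMethod: 'DEFAULT METHOD' }  // schema default, set explicitly on insert
        },
        'upsert': true
    }
};
For Question 4, bulkUpdateApiData already returns a promise, so the connection can be closed once the returned promises settle instead of after a fixed timeout. Again only a sketch, replacing the two setTimeout calls at the end of the script:
bulkUpdateApiData(initialData)
    .then(output => {
        console.log(output);
        console.log("Bulk updating EXISTING data with lesser count");
        return bulkUpdateApiData(newData);
    })
    .then(output => {
        console.log(output);
    })
    .catch(e => {
        console.log("Something went wrong during API Detail bulk update", e);
    })
    .then(() => {
        API_DETAIL.close(); // safe: all bulkWrite promises have settled by now
    });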

How to search all keys inside MongoDB collection using only one keyword

Is there a way for MongoDB to search an entire collection's keys' contents using only a single search keyword?
Suppose I have the following collection (let's call it foodCollection):
{
    name: "Chocolate Mousse Cake",
    type: "Cake"
},
{
    name: "Mother's Cookies",
    type: "Cookies"
},
{
    name: "Dark Bar",
    type: "Chocolate"
}
I want my search to look for matches that contain "Chocolate", meaning it should return "Chocolate Mousse Cake" and "Dark Bar".
I'm trying to do this using the following code:
Client-side controller
// Search Products
$scope.searchProduct = function () {
    $http.get('/api/products/search/' + $scope.searchKeyword).success(function(data){
        console.log(data);
    })
    .error(function(err) {
        console.log("Search error: " + err);
    });
}
Express.js
app.get('/api/products/search/:param', productController.search); // Search for product
Server-side controller (I used this reference from the MongoDB docs):
// Search
module.exports.search = function(req, res) {
    console.log("node search: " + req.body);
    Product.find({ $or: [{productName: req.body},
                         {productType: req.body}]
    }, function(err, results) {
        res.json(results);
    });
}
When I executed this, I got nothing. Am I missing something?
Any help would be greatly appreciated. Thank you.
UPDATE (FINAL)
Finally solved this thanks to Joydip's and digit's tips. Here's my solution in case somebody else runs into the same problem I did:
Client-side controller
$scope.searchProduct = function () {
    if ($scope.searchKeyword == '') {
        loadFromMongoDB(); // reloads original list if keyword is blank
    }
    else {
        $http.get('/api/products/search/' + $scope.searchKeyword).success(function(data){
            if (data.length === 0) {
                $scope.showNoRec = true; // my flag that triggers "No record found" message in UI
            }
            else {
                $scope.showNoRec = false;
                $scope.productList = data; // passes JSON search results to UI
            }
        });
    }
}
Express.js
app.get('/api/products/search/:keyword', productController.search); // Search for product
Mongoose schema
var mongoose = require('mongoose');
var schema = new mongoose.Schema({
    productName: String,
    productType: String,
    productMaker: String,
    productPrice: Number,
    createDate: Date,
    updateDate: Date
});
schema.index({productName: "text", productType: "text", productMaker: "text"});
Server-side controller
module.exports.search = function(req, res) {
    Product.find({$text: {$search : req.params.keyword}}, function(err, results){
        res.json(results);
    })
}
Thank you everyone for your help. :)
You can try creating an index:
db.yourcollection.createIndex({"productName":1,"productType":1})
And then search for the value. Example:
Product.find({$text:{$search: 'Chocolate'}},{productName:1, productType:1});
If you want to search all keys, you can use
db.foodCollection.createIndex( { name: "text", description: "text" } )
then search by
db.foodCollection.find({ $text: { $search: "choco" } })
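One caveat: $text matches whole (stemmed) words, so a partial keyword such as "choco" will generally not match "Chocolate". If partial matches are needed, a case-insensitive regex over the same fields is one alternative; a minimal sketch against the schema above, not part of the accepted solution:
module.exports.search = function(req, res) {
    // Note: in real code the keyword should be escaped before building a RegExp.
    var pattern = new RegExp(req.params.keyword, 'i');
    Product.find({
        $or: [
            { productName: pattern },
            { productType: pattern },
            { productMaker: pattern }
        ]
    }, function(err, results) {
        if (err) return res.status(500).json(err);
        res.json(results);
    });
};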

Cloud Code object.save() results in 'object not found' with very strange PUT command

Issue Description
I have a simple Cloud Code command to create or update an object. If there is NO objectId passed in, the routine creates a new object and returns the objectId. If the objectId exists in the parameter list, it fetches the object and updates the parameters accordingly.
The routine works for new objects fine.
The object.save() is failing when I try to update an object, despite the object.fetch() sub-routine working.
error: code=101, message=Object not found.
Verbose server logs indicate a very strange PUT command...
PUT /parse/classes/Receipt/[object%20Object]
what I would expect to see is
PUT /parse/classes/Receipt/GJaXcf7fLD
Object ACL is public r+w
Why is the object.save() not working with a valid objectId?
Cloud Code
Parse.Cloud.define("uploadReceipt", function(request,response) {
var Receipt = Parse.Object.extend("Receipt");
var receipt = new Receipt();
// passed in parameters are ['property' : ['type' : t, 'value' : v]]
var dict = request.params;
var objectIdDict = dict["objectId"];
console.log("Object Dict: " + objectIdDict);
Parse.Promise.as().then(function() {
// if we already have an objectId we are UPDATING
// Need to FETCH first
if (objectIdDict != undefined) {
console.log("Searching for ID: " + objectIdDict["value"]);
receipt.set("objectId",objectIdDict["value"]);
return receipt.fetch();
}
else {
console.log("NEW RECEIPT");
return Parse.Promise.as(receipt);
}
}).then(function(receipt) {
console.log("Receipt: " + receipt.id);
// copy over the keys from our passed in parameters to the object
for (var key in dict) {
//console.log("Key: " + key + " Value: " + dict[key]["value"]);
if (dict[key]["type"] == "Raw") {
console.log("Key: " + key + " Value: " + dict[key]["value"]);
receipt.set(key,dict[key]["value"]);
}
else if (dict[key]["type"] == "Date" && key != "updatedAt") {
console.log("Key: " + key + " Value: " + dict[key]["value"]);
var time = dict[key]["value"] * 1000; // milliseconds
receipt.set(key,new Date(time));
}
else {
// object type
var Obj = Parse.Object.extend(dict[key]["type"]);
var newObj = new Obj();
newObj.id = dict[key]["value"];
receipt.set(key,newObj);
}
}
// make sure our user is set
receipt.set("user",request.user);
// adjust the status because it has now been uploaded
receipt.set("status",RECEIPT_SUBMITTED);
console.log("Prior to save");
return receipt.save();
}).then(function(receipt) {
console.log("Finished");
response.success({"status":receipt.get("status"),"objectId":receipt.id});
},function (error) {
console.log(error);
response.error(error);
});
});
Steps to reproduce
Call the cloud code from iOS SDK with data for a new object
Notice that the command works and a new object is added to the database
Call the command again with updated information
Notice that the command fails with object not found
Expected Results
Object should be updated accordingly
Actual Outcome
error: code=101, message=Object not found.
Environment Setup
Server
parse-server version: 2.2.12
Operating System: Mac OS X 10.11.5
Hardware: MacBook Pro 2010
Localhost or remote server? Localhost
Javascript: Parse/js1.8.5
NodeJS 5.10.1
Database
MongoDB version: 3.2.4
Hardware: MacBook Pro 2010
Localhost or remote server? Localhost
Logs/Trace
Storing NEW object returns
verbose: POST /parse/classes/Receipt { 'user-agent': 'node-XMLHttpRequest, Parse/js1.8.5 (NodeJS 5.10.1)',
accept: '*/*',
'content-type': 'text/plain',
host: 'localhost:1337',
'content-length': '471',
connection: 'close' } {
"date": {
"__type": "Date",
"iso": "2016-06-19T00:30:37.492Z"
},
"category": {
"__type": "Pointer",
"className": "Category",
"objectId": "XZ1bSHtZBY"
},
"status": 0,
"amount": 61.45,
"notes": "Hopefully this works well",
"gui_status": -1,
"currency": "USD",
"user": {
"__type": "Pointer",
"className": "_User",
"objectId": "vL4ih9BAX8"
}
}
verbose: {
"status": 201,
"response": {
"objectId": "GJaXcf7fLD",
"createdAt": "2016-06-19T00:30:57.092Z"
},
"location": "http://localhost:1337/parse/classes/Receipt/GJaXcf7fLD"
}
Finished
verbose: {
"response": {
"result": {
"status": 0,
"objectId": "GJaXcf7fLD"
}
}
}
Attempt to Update object returns
verbose: PUT /parse/classes/Receipt/[object%20Object] { 'user-agent': 'node-XMLHttpRequest, Parse/js1.8.5 (NodeJS 5.10.1)',
accept: '*/*',
'content-type': 'text/plain',
host: 'localhost:1337',
'content-length': '473',
connection: 'close' } {
"category": {
"__type": "Pointer",
"className": "Category",
"objectId": "XZ1bSHtZBY"
},
"status": 0,
"amount": 5.47,
"notes": "How about now",
"gui_status": 0,
"date": {
"__type": "Date",
"iso": "2016-06-19T00:12:25.788Z"
},
"currency": "USD",
"user": {
"__type": "Pointer",
"className": "_User",
"objectId": "vL4ih9BAX8"
}
}
verbose: error: code=101, message=Object not found.
ParseError { code: 101, message: 'Object not found.' }
verbose: error: code=141, code=101, message=Object not found.
Figured it out, thanks to some help from the parse-server community and GitHub user flovilmart.
In the case of 'updating' an object, I was including a dictionary entry for the Receipt. This was successfully retrieving the Receipt that I wanted to update.
However, once I had pulled in the receipt object and was iterating through my dictionary of properties to update, I ran into the Receipt object's entry again. So I was trying to set a Receipt pointer as a property of my Receipt, with the pointer being itself! Ugh.
The very last else clause needed a condition on it to NOT include the pointer to the Receipt (itself):
for (var key in dict) {
if
....
else if (dict[key]["type"] != "Receipt"){
// object type, but don't include ourself! (the Receipt)
var Obj = Parse.Object.extend(dict[key]["type"]);
var newObj = new Obj();
newObj.set("objectId",dict[key]["value"]);
receipt.set(key,newObj);
}
}
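For reference, here is a hypothetical variant of the same loop (not the poster's exact code) that makes the guard explicit by skipping both the objectId entry and any self-referencing Receipt entry before copying parameters onto the fetched object:
for (var key in dict) {
    var entry = dict[key];
    // Skip the receipt's own id and any pointer back to the Receipt itself,
    // which is what broke the save in this case.
    if (key === "objectId" || entry["type"] === "Receipt") {
        continue;
    }
    if (entry["type"] === "Raw") {
        receipt.set(key, entry["value"]);
    } else if (entry["type"] === "Date" && key !== "updatedAt") {
        receipt.set(key, new Date(entry["value"] * 1000)); // seconds to milliseconds
    } else {
        // any other pointer type
        var Obj = Parse.Object.extend(entry["type"]);
        var obj = new Obj();
        obj.id = entry["value"];
        receipt.set(key, obj);
    }
}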

IBM worklight JSON store remove array of documents

I am working on an IBM Worklight hybrid app and I am using JSONStore to store data. To remove records from a collection I am using the id, and I am able to delete a single record that way. How do I delete multiple records together from the JSONStore? If there is an example it would be useful. Can anyone help me with this? Thanks in advance.
Delete function:
var id = JSON.parse(localStorage.getItem('jsonindex'));
var query = {
    _id: id
};
var options = {
    push: true
};
try {
    WL.JSONStore.get(PEOPLE_COLLECTION_NAME).remove(query, options)
        .then(function (res) {
            console.log("REMOVE_MSG");
        })
        .fail(function (errorObject) {
            console.log("Not Removed");
        });
} catch (e) {
    alert(INIT_FIRST_MSG);
}
JSON data
[{
    "_id": 16,
    "json": {
        "name": " Debit",
        "cardmonth": " 8",
        "cardyear": " 2028",
        "number": " 4216170916239547"
    }
}, {
    "_id": 17,
    "json": {
        "name": " Credit",
        "cardmonth": " 7",
        "cardyear": " 2027",
        "number": " 4216170916239547"
    }
}]
Try:
WL.JSONStore.get('collectionName').remove([...], options);
Replace ... with {_id: 1}, {_id: 2} or whatever query you want to use to remove documents.
If it doesn't work, please upgrade to the latest version of Worklight and try again.
Relevant:
PI10959: JSONSTORE FAILS TO REMOVE ALL DOCS IN THE DOC ARRAY WHEN A DOC ARRAY IS PASSED
IBM Worklight JSONStore | Remove Document from Collection and erase it from memory
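Putting that together with the collection from the question, a minimal sketch of removing several documents in one call (the selectedIds array and its contents are made up for illustration):
var selectedIds = [16, 17]; // hypothetical ids collected from the UI
var queries = selectedIds.map(function (id) {
    return { _id: id };
});
WL.JSONStore.get(PEOPLE_COLLECTION_NAME)
    .remove(queries, { push: true })
    .then(function (res) {
        console.log("Removed documents", res);
    })
    .fail(function (errorObject) {
        console.log("Remove failed", errorObject);
    });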
If you are able to delete a single record, it's easy to delete multiple records, but it can raise performance issues if you have many records.
If you delete one record, e.g. var id = "3";, with your Delete method, just do the same for multiple records:
var ids = [];
// when the user selects an item:
ids.push(item.id);
for (var i = 0; i < ids.length; i++) {
    Delete(ids[i]); // it's your Delete method
}
