{"name":"tfss-4dde4ec8-e678-495a-a5d0-d3e51f0bdafc-search-pic-img6.jpg","url":"http://files.parsetfss.com/d130ccda-cf9f-4264-999e-09305bed0fd1/tfss-4dde4ec8-e678-495a-a5d0-d3e51f0bdafc-search-pic-img6.jpg"}
This is the object returned when I upload the file to the Parse Server. I want to use this object to store the file in the avatar column of a Parse class as type "File", not as a string. In short, I am trying to store it as an image file after uploading. I want to do this in Cloud Code JavaScript rather than in HTML JavaScript: send the object to Parse and store it as an image file.
Here's my Cloud Code. I am a bit new to Parse.
Parse.Cloud.beforeSave("editProfileweb", function(request) {
    var avatar = request.object;
    var user = request.user;
    if (user) {
        user.set("avatar", avatar);
        user.save(null, {
            success: function(user) {
                response.success();
            },
            error: function(error) {
                // Show the error message somewhere and let the user try again.
                response.success();
            }
        });
    } else {
        response.error("User not authenticated");
    }
});
I guess this is what you are looking for
Object.set({columnName: {"name": File.name, "url": File.url, "__type": "File"}});
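For example, applied to the cloud function from the question, a minimal sketch (assuming the uploaded file's name and url are sent as request parameters, and using a Parse.Cloud.define function rather than a beforeSave hook) could look like this:

// Hedged sketch: assumes the client sends the uploaded file's name and url
// as request.params.name / request.params.url (not stated in the original post).
Parse.Cloud.define("editProfileweb", function(request, response) {
    var user = request.user;
    if (!user) {
        return response.error("User not authenticated");
    }
    // Build a File pointer from the already-uploaded file and store it in the "avatar" column
    user.set("avatar", {
        __type: "File",
        name: request.params.name,
        url: request.params.url
    });
    user.save(null, {
        success: function() {
            response.success("avatar saved");
        },
        error: function(error) {
            response.error(error.message);
        }
    });
});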
I want to send multiple files to Firebase Storage and, at the same time, save them to the Realtime Database as one record using JavaScript, e.g.:
file1 : urlonfirstfile
file2 : urlonsecondfile
for (var i = 0; i < file.length; i++) {
    var task = ref.child(file.name).put(file, metadata);
    task
        .then(snapshot => snapshot.ref.getDownloadURL())
        .then((url) => {
            console.log(url);
            userDetails.push({
                email: email,
                title1: tit,
                detail1: dit,
                file: file[i].name
            });
        });
}
Your question is somewhat vague about what information you want to store, so I've made some assumptions to come up with the code below:
The files are to be uploaded to an area of Firebase Storage specific to the logged-in user (e.g. "userFiles/CURRENT_USER/...").
The information about the uploaded files is kept under the user's own data (e.g. "users/CURRENT_USER/uploads/...").
The title and detail properties change for each file. Where these properties come from is unclear, so I'm assuming they are passed in through the object metadata.
The code below should be enough to get you started on figuring out your own solution.
// the array of File objects to upload
const fileObjArray = [ ... ]
// the metadata to store with each file
const metadata = { ... }
// the current user's ID
const currentUserId = firebase.auth().currentUser.uid;

// Where to save information about the uploads
const databaseRef = firebase.database().ref("users").child(currentUserId).child('uploads');
// Create an ID for this set of uploaded files
const uploadId = databaseRef.push().key;
// Where to save the files: a subfolder of the user's Storage files corresponding to the uploadId
const storageRef = firebase.storage().ref("userFiles").child(currentUserId).child(uploadId);

// Upload each file in fileObjArray, then fetch its download URL and return an object containing information about the uploaded file
var uploadPromiseArray = fileObjArray.map((fileObj) => {
    var uploadTask = storageRef.child(fileObj.name).put(fileObj, metadata);
    return uploadTask.then(uploadSnapshot => {
        // file uploaded successfully. Fetch the URL for the file and return it along with the UploadTaskSnapshot
        return uploadSnapshot.ref.getDownloadURL().then((url) => {
            return {
                downloadUrl: url,
                snapshot: uploadSnapshot
            };
        });
    });
});

// uploadPromiseArray is an array of Promises that resolve to objects with the properties "downloadUrl" and "snapshot"
Promise.all(uploadPromiseArray)
    .then((uploadResultArray) => {
        var batchUploadData = {
            timestamp: firebase.database.ServerValue.TIMESTAMP, // use the server's time
            files: [],
            ... // other upload metadata such as reason, expiry, permissions, etc.
        }
        batchUploadData.files = uploadResultArray.map((uploadResult) => {
            // rearrange the file's snapshot data and downloadUrl for storing in the database
            return {
                file: uploadResult.snapshot.ref.name,
                url: uploadResult.downloadUrl,
                title: uploadResult.snapshot.metadata.customMetadata.title,
                detail: uploadResult.snapshot.metadata.customMetadata.detail
            };
        });
        // commit the data about this upload to the database.
        return databaseRef.child(uploadId).set(batchUploadData);
    })
    .then((dataSnapshot) => {
        // the upload completed and information about the upload was saved to the database successfully
        // TODO: do something
    }, (err) => {
        // some error occurred
        // - a file upload failed/was cancelled
        // - the database write failed
        // - permission error from Storage or Realtime Database
        // TODO: handle error
    });
// Warning: this line will be reached before the above code has finished executing
This is what it looks like on the database:
"users": {
"someUserId-78sda9823": {
"email": "example#example.com",
"name": "mr-example",
"username": "mrexemplary",
"uploads": {
"niase89f73oui2kqwnas98azsa": {
"timestamp": 1554890267823,
"files": {
"1": {
"file": "somefile.pdf",
"url": "https://firebasestorage.googleapis.com/v0/b/bucket/o/userFiles%2FsomeUserId-78sda9823%2Fsomefile.pdf",
"title": "Some File",
"detail": "Contains a report about some stuff"
},
"2": {
"file": "screenshot.png",
"url": "https://firebasestorage.googleapis.com/v0/b/bucket/o/userFiles%2FsomeUserId-78sda9823%2Fscreenshot.png",
"title": "Screenshot of problem",
"detail": "Contains an image that shows some stuff"
},
...
}
},
...
},
...
},
...
}
Note 1: This code is not yet complete. It is missing error handling for things like permission errors and incomplete file uploads. That part is left for you to solve.
Note 2: Regarding incomplete file uploads: if any file fails to upload or fails to get its download URL, the database will not be written to. One way to handle this is to add a catch to uploadTask that returns null on error, and then, in the uploadResultArray.map(...) step, skip any uploadResult that is null (or record in the database that that particular file failed); see the sketch after these notes.
Note 3: Because Firebase Storage and the Realtime Database both use snapshots, try to keep them called uploadSnapshot/fileSnapshot and dataSnapshot respectively when using both in your code to minimise confusion. Similarly, name your references somethingStorageRef/somethingSRef and somethingDatabaseRef/somethingDBRef.
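As a minimal sketch of the approach described in Note 2 (an illustration under the same assumptions as the code above, not part of the original answer), the per-file catch and the filtering step could look like this:

// Per-file: swallow the failure so Promise.all still resolves, using null to mark a failed upload
var uploadPromiseArray = fileObjArray.map((fileObj) => {
    return storageRef.child(fileObj.name).put(fileObj, metadata)
        .then(uploadSnapshot => uploadSnapshot.ref.getDownloadURL()
            .then(url => ({ downloadUrl: url, snapshot: uploadSnapshot })))
        .catch(err => {
            console.error("Upload failed for " + fileObj.name, err);
            return null; // marker for a failed upload
        });
});

// Later: skip the failed uploads when building the database record
batchUploadData.files = uploadResultArray
    .filter(uploadResult => uploadResult !== null)
    .map(uploadResult => ({
        file: uploadResult.snapshot.ref.name,
        url: uploadResult.downloadUrl,
        title: uploadResult.snapshot.metadata.customMetadata.title,
        detail: uploadResult.snapshot.metadata.customMetadata.detail
    }));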
I'm really lost when it comes to file uploading in Meteor and managing the data between client and server.
I'm using Meteor-Files from the Veliov Group to upload multiple images on the client side. They're stored in a FilesCollection called Images, and I have my Mongo.Collection called Adverts.
collections.js:
Adverts = new Mongo.Collection('adverts');
Images = new FilesCollection({
    collectionName: 'Images',
    storagePath: () => {
        return `~/public/uploads/`;
    },
    allowClientCode: true, // Required to let you remove uploaded file
    onBeforeUpload(file) {
        // Allow upload files under 10MB, and only in png/jpg/jpeg formats
        if (file.size <= 10485760 && /png|jpg|jpeg/i.test(file.ext)) {
            return true;
        } else {
            return 'Limit 10mb';
        }
    }
});

// if client, subscribe to images
if (Meteor.isClient) {
    Meteor.subscribe('files.images.all');
};

// if server, publish images
if (Meteor.isServer) {
    Images.allowClient();
    Meteor.publish('files.images.all', () => {
        return Images.collection.find();
    });
};
What I'm trying to achieve: when I upload the images, I want to store their URLs on the document in Adverts that I'm working with (I'm using iron:router to access those documents' _id).
I managed to get the URL, but only for the first image uploaded. Here is my code, based on what I saw in the docs:
Template.imageUpload.helpers({
    imageFile: function () {
        return Images.collection.findOne();
    },
    myImage: () => {
        console.log(Images.findOne({}).link())
    }
})

Template.imageUpload.events({
    'change #fileInput': function (e, template) {
        if (e.currentTarget.files) {
            _.each(e.currentTarget.files, function (file) {
                Images.insert({
                    file: file
                });
            });
        }
    }
})
I was using a Meteor.call to send the URL to the server, but I couldn't manage to update the document with a new property pic whose value is the URL of the image.
server.js:
imageUpload: (actDoc, imgURL) => { // actDoc is the id of the document I'm working with on the client
    Adverts.update({'reference': actDoc}, {$set: {'pic': imgURL}})
},
This is probably a dumb question and everything might be in the docs, but I've read those docs back and forth and I can't work out what I need to do.
The answer to my problem was to do it server-side.
main.js (server):
FSCollection.on('afterUpload', function (fileRef) {
    var url = 'http://localhost:3000/cdn/storage/images/' + fileRef._id + '/original/' + fileRef._id + fileRef.extensionWithDot;
    MongoCollection.update({'_id': docId}, {$set: {url: url}});
});
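One way to know which advert to update inside afterUpload is to attach the advert's _id to the upload itself. A hedged sketch, assuming Meteor-Files' meta option and an advertId already available on the client from iron:router (the names here are illustrative):

// client: attach the advert id to each uploaded file
Images.insert({
    file: file,
    meta: { advertId: advertId } // advertId comes from the current route's document
});

// server: read the advert id back from the file record and store the URL on the advert
Images.on('afterUpload', function (fileRef) {
    var imgUrl = Images.link(fileRef); // builds the file's download URL
    Adverts.update({ _id: fileRef.meta.advertId }, { $set: { pic: imgUrl } });
});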
I am attempting to update a Parse user field, and the function stops in the middle of it:
Parse.Cloud.define("modifyAdminStatus", function(request, response) {
var userQuery = new Parse.Query(Parse.User);
var isAdmin = request.params.adminStatus;
console.log("isAdmin:" + isAdmin);
userQuery.equalTo("username", request.params.username);
userQuery.find({ useMasterKey: true,
success: function(user) {
console.log(user.length);
console.log("Got User")
console.log(user);
user.set("isAdmin", isAdmin);
console.log("Set Status");
user.save(null, {useMasterKey: true,
success: function(user) {
response.success();
},
error: function(error) {
response.error(error.message);
}
});
},
error: function(error) {
response.error(error.message);
}
});
});
I don't get any syntax errors. When I run the code I get:
1
Got User
[ ParseUser { _objCount: 2, className: '_User', id: '2vigcitsl6' } ]
in my console. However, the code seems to stop after I attempt to set the admin status. I have tried running it with useMasterKey, but that didn't do anything, so maybe I'm missing something about where useMasterKey should go?
The answer is:
query.find({
... code here
});
query.find returns an array; using query.first instead (or selecting one object from the array) will get a single object and allow you to set attributes on it.
When you're trying to save the user, Parse expects two parameters: the first should be an object containing any changed attributes, and the second should be the save options.
So in your case, simply change your save to user.save(null, {useMasterKey: true, success...}).
The way you have it now would create a column on Parse.User entitled useMasterKey, if permissions allow.
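Putting both points together, a minimal sketch of the corrected cloud function (an illustration, not the poster's final code) might look like:

Parse.Cloud.define("modifyAdminStatus", function(request, response) {
    var userQuery = new Parse.Query(Parse.User);
    var isAdmin = request.params.adminStatus;
    userQuery.equalTo("username", request.params.username);
    // first() yields a single user instead of the array that find() returns
    userQuery.first({ useMasterKey: true }).then(function(user) {
        if (!user) {
            response.error("No user found with that username");
            return;
        }
        user.set("isAdmin", isAdmin);
        // first argument is the changed attributes (none extra here), second is the save options
        user.save(null, { useMasterKey: true }).then(function() {
            response.success();
        }, function(error) {
            response.error(error.message);
        });
    }, function(error) {
        response.error(error.message);
    });
});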
I have JavaScript code that gets the XML object listing (http://BUCKETNAME.s3.REGION.amazonaws.com/) of an S3 bucket and uses it as a playlist:
AWS.config = {
    "accessKeyId": "ACCESS KEY",
    "secretAccessKey": "SECRET KEY",
    "region": "REGION"
};

// Create S3 service object
s3 = new AWS.S3();

var params = {
    Bucket: 'BUCKET NAME', /* required */
    Delimiter: '',
    EncodingType: 'url',
    Marker: '',
    MaxKeys: 0,
    Prefix: '',
    RequestPayer: 'requester'
};
s3.listObjects(params, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else {
        console.log('the list is approved '); // successful response
        // Here is the function that converts the file list in the XML to an array
        var b = document.documentElement;
        b.setAttribute('data-useragent', navigator.userAgent);
        b.setAttribute('data-platform', navigator.platform);
        var radioName;
        var radioTitle;
        var tracklength = 0;

        // setupPlayer function
        function setupPlayer(href, name) {
            radioName = href;
            radioTitle = name;
            $.ajax({
                type: "GET",
                url: "http://BUCKETNAME.s3.REGION.amazonaws.com/?prefix=radio/" + radioName + "/",
                dataType: "xml",
                success: function(xml) {
                    //tracklength=0;
                    tracks = [];
                    $(xml).find('Contents').each(function() {
                        tracklength = tracklength + 1;
                        tracks.push({
                            "track": tracklength,
                            "file": $(this).find('Key').text()
                        });
                    });
                    radio(tracks);
                },
                error: function() {
                    alert("An error occurred while processing the XML file.");
                }
            });
        }
    }
As you can see, in this code I take the XML listing and add a radio name (which is the folder name); after that, the AJAX call saves all the file names in that folder into the tracks array.
This code works perfectly if there is a List grant for Everyone on the bucket, in which case no AWS config is needed here: I can run the code inside the else statement of the listObjects callback and it gives me the same response.
What I want is to grant access via the keys only, so that this function does not work without the access key and secret key.
So no one can access the XML listing except those who have the access and secret keys.
Is that possible?
(This is not the full code, but you get the idea: accessing the XML listing of the bucket, getting the keys, and saving them to an array.)
You should use s3.getObject (http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getObject-property) to get your XML files instead of the $.ajax call.
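Note that the ?prefix= AJAX request inside setupPlayer fetches a bucket listing rather than a single object, so the most direct authenticated replacement for that particular call is another s3.listObjects request with a Prefix; s3.getObject is then what you would use to fetch each individual file. A rough sketch of the listing substitution, reusing the credentials and placeholder bucket name from the question:

// Authenticated replacement for the $.ajax listing call inside setupPlayer
s3.listObjects({
    Bucket: 'BUCKET NAME',
    Prefix: 'radio/' + radioName + '/'
}, function(err, data) {
    if (err) {
        alert("An error occurred while listing the radio folder.");
        return;
    }
    // data.Contents is an array of objects with a Key property, like the <Contents> nodes in the XML
    var tracks = data.Contents.map(function(obj, index) {
        return { track: index + 1, file: obj.Key };
    });
    radio(tracks);
});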
In Chrome Apps, I'm downloading a blob content from a server using JavaScript XHR (Angular $http GET in particular, with response type 'blob')
How should I save this to chrome application's file system?
Currently using an Angular wrapper on HTML5 filesystem API
https://github.com/maciel310/angular-filesystem
I do not want to show the user a popup (hence I can't use chrome.fileSystem.chooseEntry).
The chrome.fileSystem.requestFileSystem API is only supported in kiosk-only apps, hence I'm using the HTML5 FileSystem API instead of Chrome's.
I'm using following code to make XHR to fetch blob.
$http({
    url: SERVER_URL + "/someVideo.mp4",
    method: "GET",
    responseType: "blob"
}).then(function(response) {
    console.log(response);
    fileSystem.writeBlob(response.name, response).then(function() {
        console.log("file saved");
    }, function(err) {
        console.log(err);
    });
}, function (response) {
});
This is my writeBlob method
writeBlob: function(fileName, blob, append) {
    append = (typeof append == 'undefined' ? false : append);
    var def = $q.defer();
    fsDefer.promise.then(function(fs) {
        fs.root.getFile(fileName, {create: true}, function(fileEntry) {
            fileEntry.createWriter(function(fileWriter) {
                if (append) {
                    fileWriter.seek(fileWriter.length);
                }
                var truncated = false;
                fileWriter.onwriteend = function(e) {
                    // truncate all data after current position
                    if (!truncated) {
                        truncated = true;
                        this.truncate(this.position);
                        return;
                    }
                    safeResolve(def, "");
                };
                fileWriter.onerror = function(e) {
                    safeReject(def, {text: 'Write failed', obj: e});
                };
                fileWriter.write(blob);
            }, function(e) {
                safeReject(def, {text: "Error creating file", obj: e});
            });
        }, function(e) {
            safeReject(def, {text: "Error getting file", obj: e});
        });
    }, function(err) {
        def.reject(err);
    });
    return def.promise;
},
This throws SECURITY_ERR: "It was determined that certain files are unsafe for access within a Web application, or that too many calls are being made on file resources."
What's the solution for this?
I've tried using the --allow-file-access-from-files flag while launching the app. It doesn't help.
A Chrome Application's sandboxed storage doesn't allow files to be stored in the root directory (i.e. /).
Modify the code to save them in a specific sub-directory under it.
For example:
fileSystem.writeBlob("/new"+response.name, response).then(function() {
console.log("file saved");
}, function(err) {
console.log(err);
});
This would successfully save the file under /new/ directory.
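Depending on how writeBlob resolves paths, the sub-directory may need to exist before getFile can create a file inside it, since fs.root.getFile does not create intermediate directories. A hedged sketch of creating it first, assuming the same fsDefer promise used in writeBlob:

// Ensure the "new" sub-directory exists before writing into it
fsDefer.promise.then(function(fs) {
    fs.root.getDirectory('new', {create: true}, function(dirEntry) {
        console.log("directory ready: " + dirEntry.fullPath);
    }, function(err) {
        console.log("error creating directory", err);
    });
});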
To expand on this, here is a full example app showing how to download a file, save the blob, and display it back to the user:
https://github.com/PierBover/chrome-os-app-download-example