I want to add a reference to my uploaded image into the Firestore database. My code uploads the image to Firebase Storage but does not save the data into the database. I am getting the error:
Uncaught FirebaseError: Function DocumentReference.set() called with invalid data. Unsupported field value: a custom je object (found in field Image)
What is the correct way to do this?
Please see the code below:
console.log("Initialisation Successful!");
var db = firebase.firestore();
var storageRef = firebase.storage().ref().child('Images');
function addExercise(){
var exerciseName = document.getElementById("ename").value;
var exercisePart = document.getElementById("body_part").value;
var exerciseLevel = document.getElementById("elevel").value;
var file = document.getElementById("eimage").files[0];
var thisRef = storageRef.child(file.name);
thisRef.put(file).then(function(snapshot) {
console.log('done!' + thisRef );
});
db.collection("Exercises").add({
Name: exerciseName,
BodyPart: exercisePart,
Level: exerciseLevel,
Image: thisRef
})
.then(function(){
console.log("Data entered successfully!");
})
.catch(function(error){
console.error("Error!", error);
});
}
You probably just want to save the full path of the reference, with thisRef.fullPath, as this is a simple string and not a full object.
To later translate that to an object that you could read, you'd do something like:
firebase.storage().ref().child(fullPath)
This obviously assumes the bucket hasn't changed -- otherwise you'd need to store the bucket name as well.
See more details here: https://firebase.google.com/docs/storage/web/create-reference.
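A minimal sketch of how the write could look, reusing the variables from the question, storing only the plain string path, and (as a side note) waiting for the upload to finish before the Firestore document is written:

thisRef.put(file).then(function(snapshot) {
  // Store only the plain string path, not the reference object
  return db.collection("Exercises").add({
    Name: exerciseName,
    BodyPart: exercisePart,
    Level: exerciseLevel,
    Image: thisRef.fullPath
  });
})
.then(function() {
  console.log("Data entered successfully!");
})
.catch(function(error) {
  console.error("Error!", error);
});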
I am working on a PWA project where I have created an IndexedDB store and saved images and videos in it. On the next page reload, if an image/video is available in IndexedDB, it should be loaded from there.
function fetchMedia(id) {
  var transaction = db.transaction(["media"]);
  var objectStore = transaction.objectStore("media");
  var request = objectStore.get(id);

  request.onerror = function(event) {
    console.log("Unable to retrieve data from database!");
    return "";
  };

  request.onsuccess = function(event) {
    var imgFile = request.result;
    console.log(imgFile)
    var imgURL = window.URL.createObjectURL(imgFile);
    return imgURL;
  };
}
It always returns undefined.
When I console.log the imgFile, it shows that it's there in the IndexedDB:
[Screenshot: the file record stored in IndexedDB]
I have also tried this but no success yet:
var imgURL = window.URL.createObjectURL(new Blob(imgFile, {'type': 'application/octet-stream'}));
What's the correct approach to load the files from IndexedDB?
If your screenshot is accurate, then request.result is not an image, it's an object {id: '13388-7247-6247-62584', file: Blob, ...}. You didn't just store the image file, you wrapped it in an object. So what you're getting back out is an object.
Try imgFile = request.result.file; instead.
Also, you cannot return a value from inside the callback; wrap the request in a promise instead.
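A sketch combining both points, assuming the stored record has the file property shown in the screenshot (someId is just an illustrative key):

function fetchMedia(id) {
  return new Promise(function(resolve, reject) {
    var transaction = db.transaction(["media"]);
    var objectStore = transaction.objectStore("media");
    var request = objectStore.get(id);

    request.onerror = function() {
      reject(request.error);
    };

    request.onsuccess = function() {
      // Unwrap the stored record before creating the object URL
      var record = request.result;
      resolve(window.URL.createObjectURL(record.file));
    };
  });
}

// Usage
fetchMedia(someId).then(function(imgURL) {
  document.querySelector("img").src = imgURL;
});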
Hi, I want to delete a file from Firebase Storage. My storage hierarchy is simple and looks like this.
I searched for how to delete a file from Firebase Storage and found this:
function deletingFile(fileName, file, b) {
  // Create a reference to the file to delete
  // const storageRef = firebase.storage().ref();
  var desertRef = firebase.storage().ref().child(fileName);

  // Delete the file
  desertRef.delete().then(function() {
    alert('file deleted successfully');
    var delImg = firebase.database().ref('students').child(abc).once('value')
      .then(function(data) {
        data.val().file.set() = {};
      });
    $(b).parents('tr').remove();
    // File deleted successfully
  }).catch(function(error) {
    alert(error)
    // Uh-oh, an error occurred!
  });
} // function deletingFile ended here
Now when I call this function, I get the following error and I am unable to work out what is wrong here. fileName == 'DailyEase Color Theme.txt'
I'm able to retrieve the data, but it only shows the data of the last user in the database. The users are listed by their uid. Below are the code and a screenshot of the data from Firebase. Help me out, guys!
var database = firebase.database().ref('users');

database.on("child_added", function(snapshot) {
  var snapVal = snapshot.val();
  displayName.innerHTML = snapVal.mUsername;
  console.log(snapVal);

  var badge = document.createElement('span');
  badge.textContent = 'view details';
  badge.setAttribute('class', 'badge');

  var list = document.createElement('a');
  list.textContent = snapVal.mUsername;
  list.setAttribute('href', '#');
  list.setAttribute('class', 'list-group-item');

  patientList.appendChild(list);
  list.appendChild(badge);

  var userIcon = document.createElement('i');
  userIcon.setAttribute('class', 'far fa-user');
  list.insertBefore(userIcon, list.childNodes[0])
},
function(errorObject) {
  console.log("The read failed: " + errorObject.code);
});
You should refer to the specific user's node to retrieve the information.
Your code should look like:
var userRef = firebase.database().ref(`users/${user_node_key}`); // user_node_key should be the node containing the user info.
You should be able to get the node key using the current user's object.
Then call once() on the userRef if you just need to read the details a single time, or on() if you need to listen for all changes to that user.
The code should look like:
userRef.once('value').then(function(snapshot) {
  // use snapshot to get the user's data
});
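Put together, a minimal sketch could look like this (assuming Firebase Auth is in use and the users node is keyed by each user's uid):

// Assumption: the signed-in user's uid is the key of their node under 'users'
var user = firebase.auth().currentUser;
var userRef = firebase.database().ref('users/' + user.uid);

userRef.once('value').then(function(snapshot) {
  var snapVal = snapshot.val();
  displayName.innerHTML = snapVal.mUsername;
});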
When updating an image in a Google Cloud bucket, even if the update is successful, the URL serves the old version for a while (a few minutes, e.g. 5 min or so).
The link we are using looks like:
https://storage.googleapis.com/<bucket-name>/path/to/images/1.jpg
The relevant part of the code which updates the image is:
var storageFile = bucket.file(imageToUpdatePath);
var storageFileStream = storageFile.createWriteStream({
  metadata: {
    contentType: req.file.mimetype
  }
});

storageFileStream.on('error', function(err) {
  ...
});

storageFileStream.on('finish', function() {
  // storageFile.makePublic after the upload has finished, because otherwise the file is only accessible to the owner:
  storageFile.makePublic(function(err, data) {
    if (err) {
      return res.render("error", {
        err: err
      });
    }
    ...
  });
});

fs.createReadStream(filePath).pipe(storageFileStream);
It looks like a caching issue on the Google Cloud side. How can I solve it, so that the updated image is served at the requested URL right after it is updated?
In the Google Cloud admin, the new image does appear correctly.
By default, public objects get cached for up to 60 minutes - see Cache Control and Consistency. To fix this, you should set the cache-control property of the object to private when you create/upload the object. In your code above, this would go in the metadata block, like so:
var storageFileStream = storageFile.createWriteStream({
  metadata: {
    contentType: req.file.mimetype,
    cacheControl: 'private'
  }
});
Reference: https://cloud.google.com/storage/docs/viewing-editing-metadata#code-samples_1
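If the object has already been uploaded, the same property can presumably be changed in place as well; a sketch using setMetadata on the same File object as above (not tested against this setup):

var storageFile = bucket.file(imageToUpdatePath);

storageFile.setMetadata({ cacheControl: 'private' }, function(err, metadata) {
  if (err) {
    return console.error(err);
  }
  console.log('cacheControl is now', metadata.cacheControl);
});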
// 1. Delete the existing file with the same name
await bucket
  .file(filePath)
  .delete({ ignoreNotFound: true });

// 2. Save the new file under the same name
const blob = bucket.file(filePath);
await blob.save(fil?.buffer);

// 3. Read back the metadata of the re-uploaded file
const [metadata] = await storage
  .bucket(bucketName)
  .file(filePath)
  .getMetadata();
newDocObj.location = metadata.mediaLink;

I used metadata.mediaLink to get the latest download link of the image from Google Cloud Storage.
I've come across a problem uploading a large CSV file to Azure Table Storage: the data seems to stream so fast that it either doesn't upload properly or throws a lot of timeout errors.
This is my current code:
var fs = require('fs');
var csv = require('csv');
var azure = require('azure');

var AZURE_STORAGE_ACCOUNT = "my storage account";
var AZURE_STORAGE_ACCESS_KEY = "my access key";

var tableService = azure.createTableService(AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_ACCESS_KEY);

var count = 150000;
var uploadCount = 1;
var counterror = 1;

tableService.createTableIfNotExists('newallactorstable', function(error) {
  if (!error) {
    console.log("Table created / located");
  }
  else {
    console.log("error");
  }
});

csv()
  .from.path(__dirname + '/actorsb-c.csv', { delimiter: '\t' })
  .transform(function(row) {
    row.unshift(row.pop());
    return row;
  })
  .on('record', function(row, index) {
    //Output plane carrier, arrival delay and departure delay
    //console.log('Actor:' + row[0]);
    var actorsUpload = {
      PartitionKey: 'actors',
      RowKey: count.toString(),
      Actors: row[0]
    };
    tableService.insertEntity('newallactorstable', actorsUpload, function(error) {
      if (!error) {
        console.log("Added: " + uploadCount);
      }
      else {
        console.log(error)
      }
    });
    count++
  })
  .on('close', function(count) {
    console.log('Number of lines: ' + count);
  })
  .on('error', function(error) {
    console.log(error.message);
  });
The CSV file is roughly 800 MB.
I know that to fix it, I probably need to send the data in batches, but I have literally no idea how to do this.
I have no experience with the azure or csv packages, but I would suggest uploading the file using a stream. If you have the file saved to your drive, you can create a read stream from it and use that stream to upload to Azure with createBlockBlobFromStream. That question redirects me here; I suggest taking a look at it, as it handles the encoding. The code there converts the file to a base64 string, but I have the feeling this can be done more efficiently in Node. I will have to look into that, though.
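A rough sketch of what that streaming upload could look like, assuming the azure-storage BlobService API and an illustrative container name ('csv-uploads'):

var fs = require('fs');
var azure = require('azure-storage'); // assumption: the azure-storage package is used here

var blobService = azure.createBlobService(AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_ACCESS_KEY);
var filePath = __dirname + '/actorsb-c.csv';
var fileSize = fs.statSync(filePath).size;

blobService.createContainerIfNotExists('csv-uploads', function(err) {
  if (err) { return console.log(err); }

  // Stream the whole file into a single block blob instead of inserting row by row
  blobService.createBlockBlobFromStream(
    'csv-uploads',
    'actorsb-c.csv',
    fs.createReadStream(filePath),
    fileSize,
    function(uploadErr) {
      if (uploadErr) { return console.log(uploadErr); }
      console.log('Upload complete');
    }
  );
});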
What I would suggest is to upload your file to blob storage and keep a reference to the blob URI in your table storage. The block blob option gives you an easy way to do a batched upload.
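For illustration, a minimal sketch of keeping only the blob reference in the table, reusing the insertEntity style from the question (the URI and entity shape are assumptions):

// Assumption: the CSV was uploaded to a 'csv-uploads' container, as in the sketch above
var blobUri = 'https://' + AZURE_STORAGE_ACCOUNT + '.blob.core.windows.net/csv-uploads/actorsb-c.csv';

tableService.insertEntity('newallactorstable', {
  PartitionKey: 'actors',
  RowKey: 'actorsb-c',
  BlobUri: blobUri
}, function(error) {
  if (!error) {
    console.log("Blob reference saved to table storage");
  }
  else {
    console.log(error);
  }
});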