Firebase Firestore does not add anything, but without an error - javascript

I tried to connect my React Native app to Firebase as follows. After executing this 'add post' function, it neither generates an error message nor stores the data in Firestore. The photo upload function, however, works like a charm, and I can also authenticate and create new users. I would like to know the reason for this issue.
Current firebase dependency: "firebase": "^7.14.2". I also tested "^6.6.2" and "^7.9.0".
Add post function
const remoteUri = await this.uploadPhotoAsync(localUri, `photos/${this.uid}/${Date.now()}`);
return new Promise((res, rej) => {
  this.fireStore
    .collection("posts")
    .add({
      text: text,
      uid: this.uid,
      timestamp: this.timeStamp,
      image: remoteUri
    })
    .then(ref => {
      res(ref);
    })
    .catch(error => {
      rej(error);
    });
});
};
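For reference, a minimal sketch of the same write without the explicit Promise wrapper, so any rejection surfaces where the function is awaited. The addPost name is an assumption based on the "Add post function" label; the fireStore, uid, and timeStamp fields are taken from the snippet above.

// Hypothetical simplified version of the snippet above; assumes
// this.fireStore is an initialized firebase.firestore() instance.
addPost = async ({ text, localUri }) => {
  const remoteUri = await this.uploadPhotoAsync(localUri, `photos/${this.uid}/${Date.now()}`);
  // .add() already returns a Promise<DocumentReference>, so it can be awaited directly.
  const ref = await this.fireStore.collection("posts").add({
    text: text,
    uid: this.uid,
    timestamp: this.timeStamp,
    image: remoteUri
  });
  return ref;
};

Wrapping the call site in try/catch around the await makes a silently swallowed rejection (for example, a security-rules denial) visible in the console.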
Photo uploading function
uploadPhotoAsync = async (uri, filename) => {
  // const uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri;
  // const path = `file://photos/${this.uid}/${Date.now()}.jpg`
  // const path = '/storage/emulated/0/Download/20200123_104742.jpg'
  return new Promise(async (res, rej) => {
    const response = await fetch(uri);
    const file = await response.blob();
    let upload = firebase
      .storage()
      .ref(filename)
      .put(file);
    upload.on(
      "state_changed",
      snapshot => {},
      err => {
        rej(err);
      },
      async () => {
        const url = await upload.snapshot.ref.getDownloadURL();
        res(url);
      }
    );
  });
};
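As a quick sanity check, the upload helper can also be exercised on its own; this is a hypothetical call site, assuming localUri points at a local photo as in the add post function above.

// Hypothetical standalone call to confirm the helper resolves with a download URL.
this.uploadPhotoAsync(localUri, `photos/${this.uid}/${Date.now()}`)
  .then(url => console.log("download URL:", url))
  .catch(err => console.log("upload failed:", err));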

Related

Resizing images with sharp before uploading to google cloud storage

I tried to resize or compress an image before uploading it to Google Cloud Storage.
The upload works fine, but the resizing does not seem to work.
Here is my code:
const uploadImage = async (file) => new Promise((resolve, reject) => {
  let { originalname, buffer } = file
  sharp(buffer)
    .resize(1800, 948)
    .toFormat("jpeg")
    .jpeg({ quality: 80 })
    .toBuffer()
  const blob = bucket.file(originalname.replace(/ /g, "_"))
  const blobStream = blob.createWriteStream({
    resumable: false
  })
  blobStream.on('finish', () => {
    const publicUrl = format(
      `https://storage.googleapis.com/${bucket.name}/${blob.name}`
    )
    resolve(publicUrl)
  }).on('error', () => {
    reject(`Unable to upload image, something went wrong`)
  })
  .end(buffer)
})
I ran into the same issue with a project I was working on. After lots of trial and error, I found the following solution. It might not be the most elegant, but it worked for me.
In my upload route function I created a new thumbnail image object with the original file values and passed it as the file parameter to the uploadFile function for Google Cloud Storage.
Inside my upload image route function:
const file = req.file;
const thumbnail = {
  fieldname: file.fieldname,
  originalname: `thumbnail_${file.originalname}`,
  encoding: file.encoding,
  mimetype: file.mimetype,
  buffer: await sharp(file.buffer).resize({ width: 150 }).toBuffer()
}
const uploadThumbnail = await uploadFile(thumbnail);
My google cloud storage upload file function:
const uploadFile = async (file) => new Promise((resolve, reject) => {
  const gcsname = file.originalname;
  const bucketFile = bucket.file(gcsname);
  const stream = bucketFile.createWriteStream({
    resumable: false,
    metadata: {
      contentType: file.mimetype
    }
  });
  stream.on('error', (err) => {
    reject(err);
  });
  stream.on('finish', (res) => {
    resolve({
      name: gcsname
    });
  });
  stream.end(file.buffer);
});
I think the problem is with toFormat(). I can't find that function in the docs. Could you try removing it and check whether it works?
sharp(buffer)
  .resize(1800, 948)
  .jpeg({ quality: 80 })
  .toBuffer()
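Whichever format calls are kept, toBuffer() resolves asynchronously, so the resized buffer has to be awaited and that buffer written to the stream rather than the original one. A minimal sketch, assuming the same bucket, format, and file values as in the question:

// Sketch only: assumes `bucket`, `format`, and `file` as in the question above.
const uploadImage = async (file) => {
  const { originalname, buffer } = file
  // Await the resized buffer instead of discarding the promise.
  const resized = await sharp(buffer)
    .resize(1800, 948)
    .jpeg({ quality: 80 })
    .toBuffer()
  const blob = bucket.file(originalname.replace(/ /g, "_"))
  const blobStream = blob.createWriteStream({ resumable: false })
  return new Promise((resolve, reject) => {
    blobStream
      .on('finish', () => resolve(format(`https://storage.googleapis.com/${bucket.name}/${blob.name}`)))
      .on('error', reject)
      .end(resized)
  })
}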
Modify the metadata once you have finished uploading the image.
import * as admin from "firebase-admin";
import * as functions from "firebase-functions";
import { log } from "firebase-functions/logger";
import * as sharp from "sharp";

export const uploadFile = functions.https.onCall(async (data, context) => {
  const bytes = data.imageData;
  const bucket = admin.storage().bucket();
  const buffer = Buffer.from(bytes, "base64");
  const bufferSharp = await sharp(buffer)
    .png()
    .resize({ width: 500 })
    .toBuffer();
  const nombre = "IMAGE_NAME.png";
  const fileName = `img/${nombre}.png`;
  const fileUpload = bucket.file(fileName);
  const uploadStream = fileUpload.createWriteStream();
  uploadStream.on("error", async (err) => {
    log("Error uploading image", err);
    throw new functions.https.HttpsError("unknown", "Error uploading image");
  });
  uploadStream.on("finish", async () => {
    await fileUpload.setMetadata({ contentType: "image/png" });
    log("Upload success");
  });
  uploadStream.end(bufferSharp);
});
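For context, a callable function like this would be invoked from the client roughly as follows. This is only a sketch: the uploadFile name and the imageData field come from the snippet above, while imageBase64 is a hypothetical variable holding the base64-encoded image.

// Client-side sketch using the Firebase JS SDK's callable-functions API.
import firebase from "firebase/app";
import "firebase/functions";

const uploadFile = firebase.functions().httpsCallable("uploadFile");
// `imageBase64` is assumed to be the image encoded as a base64 string.
uploadFile({ imageData: imageBase64 })
  .then(() => console.log("upload triggered"))
  .catch(err => console.log("callable failed:", err));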

Uploading files with graphql to mongodb with mongoose

I want to upload a file to MongoDB with a GraphQL resolver.
In server.js I have this helper function to store the file, which is exported so it can be used in my resolver.
The function is based on what I found here: https://github.com/jaydenseric/graphql-upload/issues/8, but some things have changed in graphql since then, for example the destructuring of the file object. I don't know what should be in the path variable, or how I should use createReadStream (the function destructured from file).
const mongoose = require('mongoose');
const Grid = require('gridfs-stream');
const fs = require('fs');
//...
// Connect to Mongo
mongoose
  .connect(process.env.mongoURI, {
    useNewUrlParser: true,
    useCreateIndex: true,
    useUnifiedTopology: true,
    useFindAndModify: false
  }) // Adding new mongo url parser
  .then(() => console.log('MongoDB Connected...'))
  .catch(err => console.log(err));

const storeFile = async (upload) => {
  const { filename, createReadStream, mimetype } = await upload.then(result => result);
  const bucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, { bucketName: 'files' });
  const uploadStream = bucket.openUploadStream(filename, {
    contentType: mimetype
  });
  createReadStream()
    .pipe(uploadStream)
    .on('error', console.log('error'))
    .on('finish', console.log('finish'));
}

module.exports = { storeFile }
//...
My resolver (here in a minimal version, because for now I only want to upload the file into my database; in one of my tries it even created the fs.files and fs.chunks collections, but without any data):
Mutation: {
  uploadFile: async (_, { file }) => {
    console.log(file);
    const fileId = await storeFile(file);
    return true;
  }
}
I have this error now:
Unhandled Rejection (Error): GraphQL error: The "listener" argument must be of type function. Received undefined
and in the terminal 'error' is printed (as in the .on('error', console.log('error')) statement).
Also, I can only upload small files (max 60 kB); larger ones just don't upload, but the errors show up on every try.
Ok, I managed to solve it.
resolver mutation:
const { storeFile } = require('../../server');
//...
uploadFile: async (_, { file }) => {
  const fileId = await storeFile(file).then(result => result);
  return true;
  // later I will return something more and create some object etc.
}
supporting function from server.js
const storeFile = async (upload) => {
  const { filename, createReadStream, mimetype } = await upload.then(result => result);
  const bucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, { bucketName: 'files' });
  const uploadStream = bucket.openUploadStream(filename, {
    contentType: mimetype
  });
  return new Promise((resolve, reject) => {
    createReadStream()
      .pipe(uploadStream)
      .on('error', reject)
      .on('finish', () => {
        resolve(uploadStream.id)
      })
  })
}

module.exports = { storeFile }
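For completeness, a sketch of how the matching schema might be wired up with graphql-upload's Upload scalar. The type and field names here beyond uploadFile are assumptions, not taken from the question, and Apollo Server syntax is assumed.

// Hypothetical type definitions and resolver map for the mutation above (Apollo Server syntax).
const { gql } = require('apollo-server-express');
const { GraphQLUpload } = require('graphql-upload');

const typeDefs = gql`
  scalar Upload
  type Mutation {
    uploadFile(file: Upload!): Boolean
  }
  type Query {
    _empty: String
  }
`;

const resolvers = {
  Upload: GraphQLUpload,
  Mutation: {
    uploadFile: async (_, { file }) => {
      const fileId = await storeFile(file);
      return Boolean(fileId);
    }
  }
};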

How to solve "File name too long" while uploading local Image to Firestore and Firebase Storage in React Native?

I'm trying to upload local images to Cloud Firestore and Firebase Storage.
The PACKAGES I'm using are
react-native-image-picker to choose an image from the local device
rn-fetch-blob to transfer files as a blob to Firebase Storage
Here are the 2 functions I'm using:
export const uploadPost = async (postImageUri) => {
  return new Promise(async (res, rej) => {
    const fs = RNFetchBlob.fs;
    fs.readFile(postImageUri, 'hello', 'base64')
      .then(async (data) => {
        console.log('data from RNFetchBlob', data);
        const storageRef = storage().ref(`posts/images`).child(`${data}`);
        try {
          storageRef.putFile(data, { contentType: 'image/jpeg' })
            .on(
              storage.TaskEvent.STATE_CHANGED,
              snapshot => {
                console.log('snapshot', snapshot.state);
                console.log('progress', (snapshot.bytesTransferred) / (snapshot.totalBytes) * 100);
                if (snapshot.state === storage.TaskState.SUCCESS) {
                  console.log('SUCCESS')
                }
              },
              error => {
                console.log('Image upload error', error);
                rej(error)
              },
              () => {
                storageRef.getDownloadURL()
                  .then((downLoadUri) => {
                    console.log('File available at: ', downLoadUri);
                    // addPostInfo
                    res(downLoadUri)
                  })
              }
            )
        } catch { err => console.log(err) }
      })
  })
}
export const addPostInfo = async (post) => {
  console.log('post.postImageUri in addPostInfo', post.postImageUri.slice(10))
  const remoteUri = await uploadPost(post.postImageUri);
  return new Promise((res, rej) => {
    firestore()
      .collection('posts')
      .add({
        id: post.id,
        createdAt: post.date,
        description: post.description,
        image: remoteUri,
        location: post.checkin_location
      })
      .then(ref => {
        console.log('Finish adding in addPostInfo', ref)
        res(ref)
      })
      .catch(err => rej(err))
  })
}
And in the main screen, I directly send the image's path to addPostInfo:
const _onSharingPost = () => {
  const postCreated = {
    id: 'alkdgjhfa;oiughaoghus',
    postImageUri: postImageUri,
    date: _getCurrentDate(),
    description: postDescription,
    checkin_location: checkInLocation
  }
  postActions.addPostInfo(postCreated);
}
The path is like this:
content://com.google.android.apps.photos.contentprovider/0/1/content%3A%2F%2Fmedia%2Fexternal%2Fimages%2Fmedia%2F58/ORIGINAL/NONE/image%2Fjpeg/950379202
ERROR
...base64 string --> File name too long
It got down to the SUCCESS part of the uploadPost function but then still threw this error.
I HAVE TRIED:
Changing the path --> cutting off the content:// prefix
Using XMLHttpRequest to send a blob to uploadPost and then on to storage
As far as I know, the problem must be the base64 string converted from the image path. I know I need to change that to a blob or something, but I don't know how, and I can't find where this can be specified in the rn-fetch-blob docs.
PLEASE HELP ME
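For what it's worth, the error message suggests the base64 data itself is being used both as the storage file name (.child(`${data}`)) and as the path passed to putFile. A minimal sketch of uploading by file path instead, assuming @react-native-firebase/storage as in the snippets above; the timestamp-based filename here is an assumption.

// Sketch only: upload by local path/URI and use a generated name, not the file contents.
export const uploadPost = (postImageUri) => {
  const filename = `${Date.now()}.jpg`;                   // hypothetical naming scheme
  const storageRef = storage().ref('posts/images').child(filename);
  return storageRef
    .putFile(postImageUri)                                // pass the local path, not base64 data
    .then(() => storageRef.getDownloadURL());
};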

RNFetchBlob blocked firebase/firestore call react native

I have a React Native app that uses Firebase/Firestore. For uploading images, I am using "react-native-fetch-blob" to create a Blob.
I am trying to fetch a doc from Firestore, but my app is blocked and gets no response from Firestore at all (no catch, nothing; it just passes through the code).
Is there anything I can do to get docs from Firestore?
Below is my code
import firebase from "../../../firebaseConfig";
import RNFetchBlob from "react-native-fetch-blob";

// Prepare Blob support
window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
const Blob = RNFetchBlob.polyfill.Blob;
const fs = RNFetchBlob.fs;
window.Blob = Blob;

const originalXMLHttpRequest = window.XMLHttpRequest;
const originalBlob = window.Blob;
var firebaseUid = "";

componentDidMount = async () => {
  firebaseUid = await firebase.auth().currentUser.uid;
  this.getMyStory();
};

getMyStory = async () => {
  window.XMLHttpRequest = originalXMLHttpRequest;
  window.Blob = originalBlob;
  var docRef = await firebase
    .firestore()
    .collection("demo")
    .doc(firebaseUid)
    .collection("ArrayDoc");
  docRef
    .get()
    .then(querySnapshot => {
      querySnapshot.forEach(doc => {
        console.log(doc.id, " => ", doc.data());
      });
    })
    .catch(error => {
      console.log("error", JSON.stringify(error));
    });
};
Error screenshot (image not included)
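Note that in the snippet above, originalXMLHttpRequest is captured after the polyfill has already been assigned, so restoring it in getMyStory still leaves the polyfilled XHR in place. A sketch of one way this is often handled: capture the originals first and only swap the polyfill in around blob creation, so Firestore keeps using the stock XMLHttpRequest. The helper name below is hypothetical.

// Hypothetical ordering: keep references to the stock implementations first.
const originalXMLHttpRequest = window.XMLHttpRequest;
const originalBlob = window.Blob;

// Hypothetical helper: enable the polyfill only while building the Blob, then restore.
const withBlobPolyfill = async (fn) => {
  window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
  window.Blob = RNFetchBlob.polyfill.Blob;
  try {
    return await fn();
  } finally {
    window.XMLHttpRequest = originalXMLHttpRequest;
    window.Blob = originalBlob;
  }
};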

BINANCE API - How to get Account info with User Data Stream

I'm using Node and the ws npm package to work with WebSockets. I got the listenKey as stated in the docs (below), but I'm unable to get my account info using the User Data Stream. I'd prefer to use a stream to read my most current account info (balances, etc.) since using the REST API for it incurs a penalty (WEIGHT: 5) each time.
I've tried doing ws.send('outboundAccountInfo'), but no joy.
DOCS: https://github.com/binance-exchange/binance-official-api-docs/blob/master/user-data-stream.md
Full code example - does not return any data:
import request from 'request'
import WebSocket from 'ws'
import { API_KEY } from '../../assets/secrets'

const DATA_STREAM_ENDPOINT = 'wss://stream.binance.com:9443/ws'
const BINANCE_API_ROOT = 'https://api.binance.com'
const LISTEN_KEY_ENDPOINT = `${BINANCE_API_ROOT}/api/v1/userDataStream`

const fetchAccountWebsocketData = async () => {
  const listenKey = await fetchListenKey()
  console.log('-> ', listenKey) // valid key is returned
  let ws
  try {
    ws = await openWebSocket(`${DATA_STREAM_ENDPOINT}/${listenKey}`)
  } catch (err) {
    throw (`ERROR - fetchAccountWebsocketData: ${err}`)
  }
  // Nothing returns from either
  ws.on('message', data => console.log(data))
  ws.on('outboundAccountInfo', accountData => console.log(accountData))
}

const openWebSocket = endpoint => {
  const p = new Promise((resolve, reject) => {
    const ws = new WebSocket(endpoint)
    console.log('\n-->> New Account Websocket')
    ws.on('open', () => {
      console.log('\n-->> Websocket Account open...')
      resolve(ws)
    }, err => {
      console.log('fetchAccountWebsocketData error:', err)
      reject(err)
    })
  })
  p.catch(err => console.log(`ERROR - fetchAccountWebsocketData: ${err}`))
  return p
}

const fetchListenKey = () => {
  const p = new Promise((resolve, reject) => {
    const options = {
      url: LISTEN_KEY_ENDPOINT,
      headers: { 'X-MBX-APIKEY': API_KEY }
    }
    request.post(options, (err, httpResponse, body) => {
      if (err)
        return reject(err)
      resolve(JSON.parse(body).listenKey)
    })
  })
  p.catch(err => console.log(`ERROR - fetchListenKey: ${err}`))
  return p
}

export default fetchAccountWebsocketData
Was struggling too... for hours!
https://www.reddit.com/r/BinanceExchange/comments/a902cq/user_data_streams_has_anyone_used_it_successfully/
The Binance user data stream doesn't return anything when you connect to it, only when something changes in your account. Try running your code, then go to Binance and place an order in the book; you should see some data show up.
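In that spirit, the handler only needs the generic 'message' event (ws emits account updates there, with the event type inside the JSON payload), and the listenKey should also be kept alive periodically or the stream eventually goes quiet. A sketch reusing the ws, request, listenKey, and endpoint values from the question; the 30-minute keep-alive interval is based on the linked user-data-stream docs.

// Sketch: parse events from the generic 'message' handler and keep the listenKey alive.
ws.on('message', raw => {
  const payload = JSON.parse(raw)
  if (payload.e === 'outboundAccountInfo') {
    console.log('account update:', payload)
  }
})

// Keep-alive roughly every 30 minutes, per the user-data-stream docs.
setInterval(() => {
  request.put({
    url: LISTEN_KEY_ENDPOINT,
    headers: { 'X-MBX-APIKEY': API_KEY },
    form: { listenKey }
  }, err => err && console.log('keepalive failed:', err))
}, 30 * 60 * 1000)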
