Storing Image Using Cloud Functions for Firebase - javascript

I'm trying to refactor some code to use Cloud Functions for Firebase. The code should store an image at a path in Firebase storage. For the most part the code is the exact same as before except now instead of
server.post('/', (req, res) => {
  // Some code
});
I'm using the following according to the Firebase documentation
exports.getProcessedImage = functions.https.onRequest((req, res) => {
  // Some code
});
The code worked previously, but now my test image isn't being saved to Firebase and I'm not sure why. In the Network tab of developer tools I can see the getProcessedImage endpoint triggering the code and responding with a 200, so I'm not sure what the issue is. Thanks in advance for any help!
My full code is below :)
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);

const request = require('request');
const crypto = require('crypto');
const storage = require('@google-cloud/storage');

// Firebase Project ID and Service Account Key.
const gcs = storage({
  projectId: 'snapshelf-aabb55',
  keyFilename: './serviceAccountKey.json'
});

const bucket = gcs.bucket('snapshelf-aabb55.appspot.com');
function saveImage(url) {
  // Generate a random HEX string using crypto (a native node module).
  const randomFileName = crypto.randomBytes(16).toString('hex');

  // Fetch image info using a HTTP HEAD request.
  // https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD
  request.head(url, (error, info) => {
    if (error) {
      return console.error(error);
    }

    // Download image from Pixelz, then save the image to Firebase
    // using the Google Cloud API and the magic of Node Streams.
    // https://googlecloudplatform.github.io/google-cloud-node/#/docs/google-cloud/v0.52.0/storage/file
    // http://stackoverflow.com/questions/28355079/how-do-node-js-streams-work
    request(url)
      .pipe(
        bucket.file(`sample/images/${randomFileName}`).createWriteStream({
          metadata: {
            contentType: info.headers['content-type']
          }
        })
      )
      .on('error', (err) => {
        // Do something if the upload fails.
        console.error(err);
      })
      .on('finish', () => {
        // Do something when everything is done.
        // Get download url for stored image
        console.log('Image successfully uploaded to Firebase Storage!');
      });
  });
}
exports.getProcessedImage = functions.https.onRequest((req, res) => {
  console.log(req.body.processedImageURL);
  /*
  if (req.body && req.body.processedImageURL) {
    // Get image from Pixelz and save it to Firebase Storage.
    saveImage(req.body.processedImageURL);
    return res.status(200).end();
  }
  res.status(400).end();
  */
  const url = 'https://www2.chemistry.msu.edu/courses/cem352/SS2017_Wulff/MichiganState.jpg';
  console.log(url);
  saveImage(url);
  console.log('Saving url');
  res.status(200).send();
});

Are you deploying your Function on Firebase with Spark Plan (Free)?
If the answer is yes, your problem is because of this:
Firebase projects on the Spark plan can make outbound requests only to Google APIs; requests to third-party APIs fail with an error. See the Firebase pricing page for more information about upgrading your project.
Since you are trying to make an external request, nothing is happening when your Function is executed :(

That's easy: since you already have the admin SDK initialized, just change
const bucket = gcs.bucket('snapshelf-aabb55.appspot.com');
to
const bucket = admin.storage().bucket();
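For completeness, a minimal sketch of how the storage setup could look with the admin SDK inside the Function (assuming the rest of saveImage stays as posted; no service account key file is needed, because admin.initializeApp already runs with the project's credentials):

const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);

// Default bucket of the Firebase project (e.g. snapshelf-aabb55.appspot.com).
const bucket = admin.storage().bucket();

// saveImage() keeps writing through the same API:
// bucket.file(`sample/images/${randomFileName}`).createWriteStream({ ... })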

Related

How can node JS communicate with VueJs (appwrite SDK Question)

I'm trying to make users profiles dynamic in appwrite app. I want each user profile page to be accessible to all users so it goes like this (www.appname.com/users/{userid}).
I'm very new to Node.js, but I managed to install the Appwrite SDK for Node and created a separate folder for it. When I run the code below in Node, it gets me the user as expected in the terminal.
const sdk = require("node-appwrite");
// Init SDK
let client = new sdk.Client();
let users = new sdk.Users(client);
client
.setEndpoint("http://localhost/v1") // Your API Endpoint
.setProject("myProjectId") // Your project ID
.setKey(
"mykey"
); // Your secret API key
let promise = users.get("myUserId");
promise.then(
function (response) {
console.log(response);
},
function (error) {
console.log(error);
}
);
But I want to get this result from Vue.js: I want to be able to call users.get from a Vue component. How can I make this happen?
Here is what I have tried so far:
I created a UserService.js file and added the function below to call users.get from Node.js.
import users from "../../../api/server";

export async function getUser(userId) {
  let promise = users.get(userId);
  promise.then(
    function (response) {
      console.log(response);
    },
    function (error) {
      console.log(error);
    }
  );
}
And I called it from my VueJS component
<script>
import { getUser } from "../../services/UserService";

export default {
  name: "Profile",
  props: ["id"],
  data() {
    return {
      userprfile: false,
    };
  },
  mounted() {
    this.getUser();
  },
  methods: {
    getUser() {
      getUser(this.id).then((response) => {
        console.log(response);
      });
    },
  },
};
</script>
But it doesn't work.
All I want is a way to use the Appwrite Node.js SDK from my Vue.js component: I need to be able to pass it the user ID and get the user back in the Vue component.
UPDATE:
The code below works and I can now retrieve data from the Appwrite Node.js SDK in my browser, but the problem is that I want this to be dynamic: I need a way to pass the user ID from Vue to the Node.js SDK and retrieve the data.
const express = require("express");
const path = require("path");
const app = express(),
bodyParser = require("body-parser");
port = 3080;
// place holder for the data
const sdk = require("node-appwrite");
// Init SDK
let client = new sdk.Client();
let users = new sdk.Users(client);
client
.setEndpoint("http://localhost/v1") // Your API Endpoint
.setProject("myProjectID") // Your project ID
.setKey(
"MySecretApiKey"
); // Your secret API key
app.use(bodyParser.json());
app.use(express.static(path.join(__dirname, "../appwrite-app/build")));
app.get("/v1/users", (req, res) => {
console.log("api/users called!");
let promise = users.get("userId");
promise.then(
function (response) {
res.json(response);
},
function (error) {
console.log(error);
}
);
});
app.listen(port, () => {
console.log(`Server listening on the port::${port}`);
});
It looks like you are trying to use a Node-only module on the client (browser). You cannot use a module on the client if it depends on native Node modules, in this case fs.
So what you need to do from your frontend application is send a request to your server application (API). On the API do any file system/database retrieval, then send the results back to the client.
It's very common to write the backend/frontend as separate applications - in separate folders and even store in separate repositories.
You should also never expose any secret keys on the client.
There may also be some confusion about the term 'client'. Most of the time it's used to refer to an application run in a web browser but you also get node sdk's which are 'clients' of the services they use - like node-appwrite in your case.
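As a rough sketch of that split (assuming the Express server from the update above, listening on port 3080, with the same node-appwrite client and users objects already configured), the user id can be made a route parameter and the Vue service can call the API over HTTP instead of importing the SDK:

// server side (Node + Express + node-appwrite): make the user id dynamic
app.get("/v1/users/:id", (req, res) => {
  users.get(req.params.id).then(
    (response) => res.json(response),
    (error) => res.status(500).json({ error: error.message })
  );
});

// client side (UserService.js in the Vue app): call the API instead of the SDK
import axios from "axios";

export async function getUser(userId) {
  const { data } = await axios.get(`http://localhost:3080/v1/users/${userId}`);
  return data;
}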

AWS S3 V3 Error trying to get list of objects inside a bucket. SignatureDoesNotMatch

I have a React.js project created with create-react-app and an AWS S3 bucket in which I've saved some images that I want to display on my website.
I have created an aws.js file where I configure the client and make the call like this:
import { S3Client } from "@aws-sdk/client-s3";
import { ListObjectsV2Command } from "@aws-sdk/client-s3";

const REGION = 'eu-central-1'

const credentials = {
  accessKeyId: accessKeyId,
  privateKeyId: privateKeyId,
}

const config = {
  region: REGION,
  credentials: credentials,
}

const bucketName = {
  Bucket: bucketName,
}

const s3Client = new S3Client(config);

export const run = async () => {
  try {
    const command = new ListObjectsV2Command(bucketName);
    const data = await s3Client.send(command);
    console.log("SUCCESS\n", data);
  }
  catch (err) {
    console.log("ERROR\n", err);
  }
}
I have also created a .env file where I saved the keys, both with and without the REACT_APP prefix, but the result is the same: the credentials are rejected.
I've checked and rechecked the credentials ten times, and I also created a new user and used those keys, but nothing. I also configured CORS to allow access from my localhost.
What am I doing wrong? And is there complete A-to-Z documentation on how to use AWS services, including v3, the API docs, credentials setup and everything?
P.S. It's my first time using AWS, so some docs would be much appreciated. Thanks in advance.
UPDATE---
I tried the AWS JavaScript SDK v2 and now it works. Here is the code I used to list the objects inside a bucket.
It only works when I use AWS.config.update, though; if I passed the configuration to the bucket directly it still threw an error.
const AWS = require('aws-sdk');

AWS.config.update({
  region: region,
  accessKeyId: accessKeyId,
  secretAccessKey: secretAccessKey
});

let s3 = new AWS.S3()

export const testFnc = () => {
  // Note: AWS SDK v2 callbacks take (err, data), in that order.
  s3.listObjects({
    Bucket: 'artgalleryszili.digital'
  }, (err, data) => {
    if (err) {
      console.log(err);
    }
    else {
      console.log(data);
    }
  })
}

How to download after uploading a file using express and multer?

I have uploaded a file to my backend filesystem using multer.
My server is Node and the client is React.
I'm having trouble downloading and displaying the saved file on the React client.
Whenever I call res.download(file) it just throws a connection refused error on the client side.
My code is as follows:
UserToUploadMapping.js
const mongoose = require("mongoose");
const UserToUploadMapping = new mongoose.Schema({
userId: {
type:String,
required:true
},
file: {
type: Object,
required: true,
},
date: {
type: Date,
default: Date.now,
},
});
module.exports = mongoose.model("UserToUploadMapping", UserToUploadMapping);
uploadVideo.js
const router = require("express").Router();
const multer = require('multer');
const UserToUploadMapping = require('../models/UserToUploadMapping')
let nameFile = ''
const storage = multer.diskStorage({
destination:'./Videos',
filename:(req,file,cb) => {
console.log(file)
nameFile = file.originalname + " "+ Date.now()
cb(null, nameFile)
}
})
const upload = multer({storage:storage})
router.post('/upload', upload.single('video'), async (req,res,next) => {
console.log("object")
const saveMapping = new UserToUploadMapping({
userId:'123',
file:req.file,
})
await saveMapping.save()
res.send("Video uploaded")
})
router.get('/download', async(req,res,next) => {
const x = await UserToUploadMapping.find()
// res.send(x)
res.download(x[0].path)
})
module.exports = router;
CLIENT
const fetchVideo = async () => {
  const resp = await axios.get(
    "http://localhost:5000/api/user/video/download"
  );
  console.log(resp)
};

return (
  <>
    <NavContainer />
    <div className={classes.Post}>
      <Input
        type="file"
        onChange={(e) => uploadVideos(e.target.files)}
        accept="video/mp4"
      />
      {/* <Button onClick={(e) => submitHandler(e)}>Upload</Button> */}
      <video></video>
    </div>
  </>
);
Error
There are a few problems within the uploadVideo.js file:
To get the path from the data, you need to use x[0].file.path (based on how you save the file in the database):
const saveMapping = new UserToUploadMapping({
  userId: '123',
  file: req.file,
})
To avoid problems with where the uploadVideo.js file lives versus where the application is run, you should use an absolute path when saving files to the system.
(Small problem) Your filename function will produce filenames like "video.mp4 1622180824748". Something like "video-1622181268053.mp4" is better, since it keeps the correct file extension.
You can refer to this code
const router = require("express").Router();
const multer = require('multer');
const UserToUploadMapping = require('../models/UserToUploadMapping')
const path = require('path');
const uploadFolder = path.join(__dirname, "Videos"); // use a variable to hold the value of upload folder
const storage = multer.diskStorage({
destination: uploadFolder, // use it when upload
filename: (req, file, cb) => {
// nameFile = file.originalname + " "+ Date.now() // --> give "video.mp4 1622180824748"
let [filename, extension] = file.originalname.split('.');
let nameFile = filename + "-" + Date.now() + "." + extension; // --> give "video-1622181268053.mp4"
cb(null, nameFile)
}
})
const upload = multer({ storage: storage })
router.post('/upload', upload.single('video'), async (req, res, next) => {
const saveMapping = new UserToUploadMapping({
userId: '123',
file: req.file,
})
await saveMapping.save()
res.send("Video uploaded")
})
router.get('/download', async (req, res, next) => {
const video = await UserToUploadMapping.find({});
res.download(video[0].file.path); // video[0].file.path is the absolute path to the file
})
module.exports = router;
Your code indicates you are handling large files (videos). I would strongly recommend looking at separation of concerns; handling this as part of your other business logic is not recommended, based on my experience. It can, for example, complicate firewall rules and DDoS protection when you need them in the future.
As a minimum, move upload and download into their own server, e.g. 'files.yourappnamehere.com', so that you can handle the specifics separately from your business logic API.
If you run in the public cloud, I would strongly recommend looking at reusing blob upload/download functionality, letting your clients upload directly to blob storage and also handling downloads directly from blob storage, e.g. in Azure, AWS or GCP.
This will save you a lot of the implementation details of handling (very) large files, and also give "free" extensibility options such as events on file upload completion.
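As one hedged example of what "letting clients upload directly to blob storage" can look like on AWS (the bucket name, region and route are placeholders, not part of the original code; AWS SDK v2 assumed), the API only hands out a short-lived pre-signed URL and the browser then PUTs the video straight to S3:

const AWS = require('aws-sdk');
const s3 = new AWS.S3({ region: 'us-east-1' }); // hypothetical region

router.get('/upload-url', (req, res) => {
  // Generate a URL the client can use to PUT the file directly to S3.
  const url = s3.getSignedUrl('putObject', {
    Bucket: 'my-video-bucket',       // hypothetical bucket name
    Key: `videos/${Date.now()}.mp4`,
    Expires: 300,                    // URL valid for 5 minutes
    ContentType: 'video/mp4'
  });
  res.json({ url });
});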
You are running two apps, frontend and backend, on different ports (3000 and 5000), so the browser blocks cross-domain requests. On Express you must enable CORS to allow requests from the frontend URL (http://localhost:3000), for example as sketched below.
For the download route, try using window.location instead of Axios.
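A minimal sketch of both points (assuming the Express app from uploadVideo.js and the React dev server on http://localhost:3000):

// server: enable CORS for the React origin (uses the `cors` npm package)
const cors = require("cors");
app.use(cors({ origin: "http://localhost:3000" }));

// client: let the browser handle the download instead of Axios
const downloadVideo = () => {
  window.location.href = "http://localhost:5000/api/user/video/download";
};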
It looks like you might have a typo in your get handler... you're referencing an element called 'path', but that's not declared in your schema
router.get('/download', async (req, res, next) => {
  const x = await UserToUploadMapping.find()
  // res.send(x)
  res.download(x[0].path) // <- path doesn't seem to be in the schema
})
Since you don't have a try/catch in that function, the resulting error could be bringing down your server, making it unavailable
You might also want to take a look at this for more detail on How to download files using axios
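As a minimal sketch of that try/catch point (assuming the same route and the x[0].file.path fix from the answer above), a failed lookup or bad path then returns a 500 instead of throwing out of the async handler:

router.get('/download', async (req, res, next) => {
  try {
    const x = await UserToUploadMapping.find();
    res.download(x[0].file.path);
  } catch (err) {
    console.error(err);
    res.status(500).send('Download failed');
  }
});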

Upload csv file to aws s3 bucket directly from a server

Happy weekend all,
I'm working on a task that fetches data from an API, stores it in a csv file, and then uploads that file directly to an AWS S3 bucket. I've tried several ways but I'm currently stuck at the very last point. Any help would be much appreciated.
The code below demonstrates most of the problem and what I've been trying so far.
First, I will fetch the data from an API
async systems() {
  const endpoint = sampleEndPoints.SYSTEMS
  return this.aggregateEndpoint(endpoint)
}
Second, I take the data that was fetched and put it into a csv file as a buffer (because I have to pass it to fs.createReadStream later on).
// generate JSON to Buffer
async generateCsvToBuffer(json) {
  const {aws} = this.config
  var ws = xlsx.utils.json_to_sheet(json)
  var wb = xlsx.utils.book_new();
  await xlsx.utils.book_append_sheet(wb, ws, 'Systems')
  const csvParsed = xlsx.write(wb, { type: 'buffer'})
  return csvParsed;
}
Third, I take the buffer from csvParsed in order to upload it to AWS S3. The problem is right here: Body: fileStream.path is supposed to contain the content of the file, but unfortunately it logs out like this, which comes from fs.createReadStream:
'{"type":"Buffer","data":[80,75,3,4,10,0,0,0,0,0,249,117,199,78,214,146,124
async uploadSample(file) {
  const {aws} = this.config

  AWS.config.update({
    secretAccessKey: aws.secretAccessKey,
    accessKeyId: aws.accessKeyId,
    region: 'us-east-2'
  })

  const bufferObject = new Buffer.from(JSON.stringify(file))

  /*** WE NEED THE FILE SYSTEM IN ORDER TO STORE */
  const fileStream = fs.createReadStream(bufferObject)
  const uploadParams = {Bucket: aws.bucket, Key: aws.key, Body: fileStream.path}
  const s3 = new AWS.S3()

  await s3.upload(uploadParams, null, function(error, file) {
    if (error) {
      console.log(error)
    } else {
      console.log('Successfully uploaded')
    }
  })
}
All of my functions are executed in server.js, so if you have a look at this you can get the whole picture of the problem.
app.get('/systems/parsed', async (req, res) => {
  const Sample = await Sample()

  // Fetch the data from an API
  const systems = await Cache.remember('systems', async () => {
    return Sample.systems()
  })

  const integration = await IntegrationInstance()

  /** GET THE RESPONSE DATA AND PUT THEM IN A CSV FILE */
  const result = await integration.generateCsvToBuffer(systems)

  const aws = await AwsInstance()

  /*** GET THE SYSTEMS FILE (CSV FILE) THEN UPLOAD THEM INTO THE AWS S3 BUCKET */
  const awsUpload = await aws.uploadWorkedWithBuffer(result)

  return res.send(awsUpload);
})
My only concern here is that the file uploads successfully to AWS S3, but the content of the file is still the serialized buffer. Any help with the existing functions, or any shorter way, would be much appreciated.
To summarize again: fetch data from a server -> put it into a csv file as a buffer (from a web browser) -> upload it to an AWS S3 bucket -> the problem is that the file is uploaded but its content is still the buffer.
It looks like you are making things more complicated than necessary here. According to the documentation for .upload, you can pass a Buffer to the upload directly instead of creating a stream from the buffer. I suspect your underlying issue is passing the stream's path instead of the stream itself, though.
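A minimal sketch of that suggestion (assuming the same aws config object and the csvParsed buffer returned by generateCsvToBuffer; the ContentType is an assumption):

const AWS = require('aws-sdk');

async function uploadCsvBuffer(aws, csvBuffer) {
  const s3 = new AWS.S3({
    accessKeyId: aws.accessKeyId,
    secretAccessKey: aws.secretAccessKey,
    region: 'us-east-2'
  });

  // s3.upload accepts a Buffer as Body directly; no temp file or stream is needed.
  return s3.upload({
    Bucket: aws.bucket,
    Key: aws.key,
    Body: csvBuffer,
    ContentType: 'text/csv'
  }).promise();
}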
I actually solved it.
First, whenever you create the generateCsvToBuffer function, remember to set a bookType when writing your wb (Workbook) so that S3 recognizes the file. The function should be something like this:
async generateCsvToBuffer(json) {
  const {aws} = this.config
  var ws = xlsx.utils.json_to_sheet(json)
  var wb = xlsx.utils.book_new();
  await xlsx.utils.book_append_sheet(wb, ws, 'Systems')
  const csvParsed = xlsx.write(wb, { type: 'buffer', bookType: 'csv'})
  return csvParsed;
}
Second, you have to add Content-Disposition: attachment to the uploadParams in the AWS configuration (a sketch of what that could look like follows the code below).
async uploadSample(file) {
  const {aws} = this.config

  AWS.config.update({
    secretAccessKey: aws.secretAccessKey,
    accessKeyId: aws.accessKeyId,
    region: 'us-east-2'
  })

  const bufferObject = new Buffer.from(JSON.stringify(file))

  /*** WE NEED THE FILE SYSTEM IN ORDER TO STORE */
  const fileStream = fs.createReadStream(bufferObject)
  const uploadParams = {Bucket: aws.bucket, Key: aws.key, Body: fileStream.path}
  const s3 = new AWS.S3()

  await s3.upload(uploadParams, null, function(error, file) {
    if (error) {
      console.log(error)
    } else {
      console.log('Successfully uploaded')
    }
  })
}
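The pasted code above doesn't actually show the Content-Disposition change; as a sketch of what the answer describes (the filename is hypothetical), the uploadParams would look something like:

const uploadParams = {
  Bucket: aws.bucket,
  Key: aws.key,
  Body: csvParsed, // the buffer returned by generateCsvToBuffer
  ContentDisposition: 'attachment; filename="systems.csv"'
};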

Upload Image from Google Cloud Function to Cloud Storage

I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then upload to Google Cloud Storage.
I keep receiving the same error when triggering the function: ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png'.
The error seems to occur within the finish callback function when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Note that I can't generate a signed upload URL as suggested in this question since the application invoking this is an external, non-user application. I also can't upload directly to GCS since I'll be needing to make custom filenames based on some request metadata. Should I just be using Google App Engine instead?
Function code:
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const Storage = require('@google-cloud/storage');
const _ = require('lodash');

const projectId = 'xxx';
const bucketName = 'xxx';

const storage = new Storage({
  projectId: projectId,
});

exports.uploadFile = (req, res) => {
  if (req.method === 'POST') {
    const busboy = new Busboy({ headers: req.headers });
    const uploads = []
    const tmpdir = os.tmpdir();

    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
      const filepath = path.join(tmpdir, filename)
      var obj = {
        path: filepath,
        name: filename
      }
      uploads.push(obj);
      var writeStream = fs.createWriteStream(obj.path);
      file.pipe(writeStream);
    });

    busboy.on('finish', () => {
      _.forEach(uploads, function(file) {
        storage
          .bucket(bucketName)
          .upload(file.path, {name: file.name})
          .then(() => {
            console.log(`${file.name} uploaded to ${bucketName}.`);
          })
          .catch(err => {
            console.error('ERROR:', err);
          });
        fs.unlinkSync(file.path);
      })
      res.end()
    });

    busboy.end(req.rawBody);
  } else {
    res.status(405).end();
  }
}
I eventually gave up on using Busboy. The latest versions of Google Cloud Functions support both Python and Node 8. In Node 8, I just put everything into async/await functions and it works fine.
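As a hedged sketch of what "putting everything into async/await" can look like for the finish handler above (assuming the same storage, bucketName and uploads variables), the key point is to wait for each upload to settle before deleting the temp file:

busboy.on('finish', async () => {
  for (const file of uploads) {
    try {
      await storage.bucket(bucketName).upload(file.path, { name: file.name });
      console.log(`${file.name} uploaded to ${bucketName}.`);
    } catch (err) {
      console.error('ERROR:', err);
    }
    // Delete the temp file only after the upload has finished or failed.
    fs.unlinkSync(file.path);
  }
  res.end();
});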
