Firebase admin get file from Storage with temporary download url - javascript

I am trying to implement a function which provides a temporary file URL for downloading. Right now the user enters his/her email address and the function responds with a file URL. To fetch the file I am using the firebase-admin SDK to access the storage bucket and retrieve a file.
Problem : Right now everything is working fine but the file URL is not working.. I am getting the error: Anonymous caller does not have storage.objects.get access to the Google Cloud Storage object.
// POST /getFile — verify the requester's email, look up the user, then
// email them a link to form.pdf and echo the result back in the response.
app.post('/getFile', (req, res) => {
  const email = req.body.mail;
  if (!micro.validateEmail(email)) {
    // BUG FIX: an invalid email previously fell through without any
    // response at all, leaving the client request hanging.
    return res.status(400).send({ error: 'Invalid email address' });
  }
  micro.checkIfUserExists(admin, email)
    .then(data =>
      storage.file('form.pdf')
        .getMetadata()
        .then(fileRes =>
          sendMail.send(email, fileRes[0].mediaLink).then(msg => {
            res.send({
              uri: fileRes,
              res: msg,
              mail: email,
              data: data
            });
          })
        )
    )
    .catch(err => {
      // BUG FIX: the original chain had no rejection handler, so any
      // failure became an unhandled rejection and the client never
      // received a response.
      console.error('getFile failed:', err);
      res.status(500).send({ error: err.message });
    });
});
Edit:
So I have found that I could use getSignedURL to get a URL, but I don't know how to specify the file path for which a URL is needed.
const bucket = storage.bucket();
//-
// Generate a URL that allows temporary access to list files in a bucket.
//-
const config = {
  action: 'list',
  // NOTE(review): v4 signed URLs are limited to 7 days — a far-future
  // date like this only works with the default v2 signing; confirm
  // which signing version is in use.
  expires: '03-17-2025'
};
//-
// If the callback is omitted, we'll return a Promise.
//-
bucket.getSignedUrl(config)
  .then(function (data) {
    const url = data[0];
    return url;
  })
  .catch(function (err) {
    // BUG FIX: without a handler a signing failure (e.g. missing
    // iam.serviceAccounts.signBlob permission) became an unhandled
    // Promise rejection.
    console.error('getSignedUrl failed:', err);
  });
Edit 2
So I have found that you can specify the file by specifying the bucket.file('')
// NOTE(review): despite the name, this is a File handle, not a Bucket —
// bucket().file('form.pdf') returns a File object, which is what
// getSignedUrl must be called on to sign a specific file.
const bucket = storage.bucket().file('form.pdf');
But now I have another problem. I am getting this error in firebase function log
Permission iam.serviceAccounts.signBlob is required to perform this operation on service account

Related

How can I locally test cloud function invocation from third-party in Firebase?

I am developing an API for a third-party application not related to Firebase. This API consist of cloud functions to create and add users to database, retrieve user information and so on. These functions are created using the admin SDK. Example of a function that adds a user looks like this:
// HTTP Cloud Function: look up a user document by checksummed wallet
// address. Expects a JSON body { address: string }; responds with the
// user's stored fields on success, or { success: false, logging } when
// the user is missing or the address is malformed.
export const getUser = functions.https.onRequest(async (req, res) => {
  res.set('Access-Control-Allow-Origin', '*');
  if (req.method === 'OPTIONS') {
    // CORS preflight — answer with allowed headers only, no body.
    res.set('Access-Control-Allow-Headers', 'Content-Type');
    res.set('Access-Control-Max-Age', '3600');
    res.status(204).send('');
  } else {
    let logging = '';
    let success = false;
    try {
      const utils = ethers.utils;
      const body = req.body;
      const address = body.address;
      // getAddress() normalizes the checksum; it throws on malformed input.
      const userAddress = utils.getAddress(address);
      logging = "received address: " + address + " checksum address: " + userAddress;
      const db = admin.firestore();
      const collectionRef = db.collection('users');
      // Count all matching documents
      const query = collectionRef.where("userAddress", "==", userAddress);
      const snapshot = await query.get();
      // If no documents match, there is no matching user
      console.log(snapshot.docs.length);
      if (snapshot.docs.length != 1) {
        logging += "User does not exist in database.";
        res.send({success: success, logging: logging});
        return;
      }
      const data = snapshot.docs[0].data();
      // BUG FIX: when data() returned undefined the original sent no
      // response at all, leaving the caller hanging; every path now
      // answers the request.
      if (data != undefined) {
        success = true;
        res.send({
          success: success,
          createdAt: data.createdAt,
          emailAddress: data.emailAddress,
          userAddress: data.userAddress,
          updatedAt: data.updatedAt,
          userName: data.userName,
          logging: logging
        });
      } else {
        res.send({success: success, logging: logging + "Document data was undefined."});
      }
    } catch (err) {
      // BUG FIX: a malformed address made utils.getAddress() throw,
      // crashing the function with no response; report it instead.
      res.status(400).send({success: false, logging: logging, error: String(err)});
    }
  }
});
NOTE: These functions are NOT going to be called by the third-party application users, only by the third-party application itself.
I am pretty new at programming so I understand that this may not be the best way to code this functionality, and I'm grateful for any tips you might have here as well. Anyway, back to my question. I'm trying to mimic the way that my customer is going to invoke these functions. So to test it, I'm using the following code:
// Manual test driver: POSTs a wallet address to the locally-emulated
// user-getUser function and logs the response (or the error).
function runGetUser() {
  // test values
  const walletAddress = 'myMetaMaskWalletAddress';
  const requestConfig = {
    method: 'POST',
    url: 'http://127.0.0.1:5001/cloud-functions/us-central1/user-getUser',
    data: { "address": walletAddress },
  };
  axios(requestConfig)
    .then((response) => {
      console.log(response.data);
    })
    .catch((error) => {
      console.log(error);
    });
};
This works fine. However, I do not want anyone to be able to invoke these functions when I actually deploy them later. So I have been reading Firebase docs and googling on how to setup proper authentication and authorization measures. What I have found is setting up a service account and using gcloud CLI to download credentials and then invoke the functions with these credentials set. Is there not a way that I could configure this so that I query my API for an authorization token (from the file where the axios request is) that I then put in the axios request and then invoke the function with this? How do I do this in that case? Right now also, since I'm testing locally, on the "cloud function server-side" as you can see in my cloud function example, I'm allowing all requests. How do I filter here so that only the axios request with the proper authorization token/(header?) is authorized to invoke this function?
Thank you for taking the time to read this. Best regards,
Aliz
I tried following the instructions on this page: https://cloud.google.com/functions/docs/securing/authenticating#gcloud where I tried to just invoke the functions from the Gcloud CLI. I followed the instructions and ran the command "gcloud auth login --update-adc", and got the response: "Application default credentials (ADC) were updated." Then I tried to invoke a function I have "helloWorld" to just see that it works with the following command: curl -H "Authorization: bearer $(gcloud auth print-identity-token)" \http://127.0.0.1:5001/cloud-functions/us-central1/helloWorld", and I got the following response: "curl: (3) URL using bad/illegal format or missing URL". So I don't know what to do more.

Express/NodeJS - RESTful way of uploading images and filtering products

I want to transform my application in REST convention and I don't know what is the correct form of uploading an image or applying filter for products.
About uploading image: on my client side user uploads an image, then I save it on my backend. Then I get back the image path from my backend (because I use a product preview and I need to see the image). And after when the users submits his product I just save the uploaded images path. So the image upload is not dependent on product upload, but it's strongly connected.
About my filtering: the filtering parameters and values are not static, these are always modifying data, so I used POST for this and I get the filter values from my request body as a JS object.
These are the calls from my client side:
// Thin client-side wrappers around the shared axios instance (API).
// uploadImage posts multipart form data; getAllProducts posts the
// filter object as the request body.
export const uploadImage = (formData,config) => API.post('/product/uploadImage',formData,config)
export const getAllProducts = (querydata) => API.post('/product/getProducts',querydata)
My server routing:
// NOTE(review): both routes use POST — see the accepted answer below for
// the RESTful verb/resource layout these should migrate to.
router.post('/uploadImage', uploadImage)
router.post('/getProducts',getProducts)
My controllers:
// Multer middleware (storage engine configured elsewhere); accepts a
// single file under the form field name "file".
var upload = multer({ storage: storage }).single("file")

// POST handler: stores the uploaded image on disk and returns its
// server-side path so the client can preview it before product submit.
export const uploadImage = (req, res) => {
  upload(req, res, err => {
    if (err) {
      return res.json({ success: false, err })
    }
    // BUG FIX: when the request contains no file, multer leaves req.file
    // undefined and the original crashed reading `.path`.
    if (!req.file) {
      return res.json({ success: false, err: 'No file uploaded' })
    }
    // req.file is the idiomatic accessor; res.req.file is the same object.
    return res.json({ success: true, image: req.file.path, fileName: req.file.filename })
  })
};
export const getProducts= async (req) => {
const findArgs={}
req.body.filters.forEach((categoryObject) => {
let objectKey=Object.keys(categoryObject)[0]
findArgs[objectKey]={$in:categoryObject[objectKey]}
})
...
How can I modify this to have a RESTful structure? Thanks in advance.
Although there is no specific REST specification, your methods should be generally defined in this way:
GET: /products
Get all products (you can also define a filter using query parameters eg: GET: /products?category=book&author=hawkins)
POST: /products - Create a new product
DELETE: /products/:id - Delete a product
PATCH: /products/:id - Edit a product
POST: /products/:id/image - Add an image to the product
GET: /products/:id - Fetch a single product
You can read more here: https://learn.microsoft.com/en-us/azure/architecture/best-practices/api-design

How do I upload a string as a file to Google Drive?

here's my problem :
I want to create a Google Sheets extension in which I basically extract data from a sheet in Google Sheets, that I modify using methods in node JS.
Then, having the data that I modified in a string, I want to upload that string into the client's Drive, in a csv or xml file. Therefore I don't have a local file that I can use to upload the file, just a string variable.
How do I upload that string ?
Thanks a lot, that's my first app and I'm struggling a bit.
Code
const {google} = require ('googleapis');
const keys = require ('./keys.json');

// Service-account JWT client for the Drive/Sheets APIs.
const client = new google.auth.JWT(
  keys.client_email, null,
  keys.private_key,
  // BUG FIX: OAuth scopes must be full URLs — the bare
  // 'googleapis.com/auth/drive' string is rejected by the token server.
  ['https://www.googleapis.com/auth/drive'],
  'https://www.googleapis.com/…'
);

client.authorize(function(err, tokens){
  if (err){
    console.log(err);
    return
  } else {
    console.log('Connected');
    gsrun(client);
  }
});

// Builds an authorized Sheets v4 client (body still to be filled in).
async function gsrun(cl) {
  const gsapi = google.sheets({version: 'v4', auth: cl});
}
You have to set your file's metadata and the data it will contain (it's important the MIME type for this case must be text/csv) and the file's body will be a simple string. This code will help you taking into consideration you already did the OAuth process and have the string you want to insert:
// Entry point for the CLI demo: uploads a demo string to Drive and logs
// the API result.
// NOTE(review): `drive` must be an authorized Drive service instance
// created before this call — it is not defined in this snippet.
module.exports.init = async function (){
// Before calling the API, build your own Drive service instance
// In the second argument, you must pass your own string message
const pro = await uploadSimpleString(drive, null);
console.log(pro);
}
// Uploads a plain string to Google Drive as a CSV file.
//   drive   - an authorized googleapis Drive service instance
//   message - file body; falls back to a demo string when null/empty
// Returns a Promise resolving with the Files.create API result.
// BUG FIX: declared with const — the original implicit-global assignment
// throws a ReferenceError under strict mode / ES modules.
const uploadSimpleString = (drive, message) => {
  // Set file metadata and data
  message = message || 'This is a simple String nice to meet you';
  const fileMetadata = {'name': 'uploadSimpleStringt.csv'};
  const media = {
    mimeType: 'text/csv',   // MIME type must be text/csv for this case
    body: message
  };
  // Return the Promise result after completing its task
  return new Promise((resolve, reject) => {
    try {
      // Call Files: create endpoint
      drive.files.create({
        resource: fileMetadata,
        media: media,
        fields: 'id'
      }, (err, results) => {
        // BUG FIX: return after reject so resolve() is not also invoked.
        if (err) return reject(`Drive error: ${err.message}`);
        resolve(results);
      });
    } catch (error) {
      // BUG FIX: the original only console.log'd here, leaving the
      // Promise pending forever on a synchronous throw.
      reject(error);
    }
  });
}
Notice
To test this code, run it in your CLI using this command:
node -e 'require("./index.js").init()'
Where index.js is your file's name and init() is your main function.
Docs
For more info, please check these links, and also consider using the [google-drive-api] tag; that way there is a better chance of receiving help, because more people will be able to find your question.
How to get Help
Files: create
G Suite documents and corresponding export MIME types

In electron, how to upload a file from it's full filename

In my electron app I have a button that says "upload contract".
When clicked it looks from the stored contract's filename ie. /home/users/filename.docx and gets it (no need for a dialog window).
Now, I have problems uploading this file as a multipart form data while using axios.
I've read about this issue, asking for file upload on axios which led to this pull request for file upload on browser and this pull for uploading in node.js.
I've read through the issues and comments applying bit by bit but seems to have troubles getting the real file uploading right using axios.
Here is what I have been doing.
Upload as browser
export function generateContract(fileUrl, customer) {
const data = new FormData()
const file = fs.createReadStream(fileUrl)
data.append('names', customer.names)
data.append('email', customer.email)
data.append('contract', file)
return data
}
//- In between generateContract & uploadFile function some function
//- gets the generatedContract data & push it to uploadFile for axios uploading.
//- In API file.
// POST the multipart payload to the email endpoint; when the
// connectivity check fails, fall back to the offline endpoint instead.
export function uploadFile(formData) {
  return checkConnetion()
    .then(() => axios.post(emailUrl, formData))
    .catch(() => axios.post(emailUrlOffline, formData))
}
In generatedContract I'm using FormData that (I assume) is a global function in browsers to create a form instance for uploading files.
I'm also using fs.createReadStream cause without it my file upload looks like an object with three strings (the fileUrl gets to be a normal string).
However, uploading like this and console.loging my request body from my local server I get a hopeless
{ names: 'Mwajuma Salmin',
email: 'sammwaj#yahoo.com',
contract: '[object Object]' }
The contract file is `[object Object]'.
Uploading as node.js
Everything is the same, except I'm using npm's form-data.
Output is.
{ '[object FormData]': '' }
Where am I messing up?
I tried uploading with postman and manually selected the file to upload as form-data and that other data (email & names), it was all successful!
Output.
{ names: 'maotora',
contract:
[ File {
domain: null,
_events: {},
_eventsCount: 0,
_maxListeners: undefined,
size: 11609,
path: '/tmp/upload_0af5c038e147888f0a7b89ad4784a4dc',
name: 'Don Joe_08-06-17\'s_contract-for Plot 011.pdf',
type: 'application/pdf',
hash: null,
lastModifiedDate: 2017-09-28T18:56:31.083Z,
_writeStream: [Object] } ] }
I can't use form uploading in the app (just the button to upload without opening a dialog to select file) I only have the filename to the file.
Thanks.
This question had been open for some time since I couldn't solve the issue with axios and had to use superagent for a while.
However, now that it's solved and can use axios just fine, this is how I did solved it on my project.
//- Correctly placing a file_Url into FormData
//- Correctly placing a file_Url into FormData
// Build the multipart payload from a contract file path and the
// customer's { names, email }.
export function generateContract(url, {names, email}) {
  const file = {
    uri: url,
    // BUG FIX: the original pattern /\.[doc,docx,odt,]+$/ is a character
    // class (letters d,o,c,x,t and commas), not a list of extensions —
    // use alternation to strip exactly one known extension. Native
    // String#replace is equivalent to the _.replace call it replaces.
    name: url.replace(/\.(docx?|odt)$/, ''),
    type: 'docx'
  }
  const formData = new FormData()
  formData.append('contract', file)
  formData.append('names', names)
  formData.append('email', email)
  return formData
}
//- 1. Calling generateContract method
//- 2. Passing the formdata (from method 1) to sendEmail
const contractData = generateContract(contractUrl, payload)
const response = yield sendEmail(contractData)
//- The sendEmail function that makes requests using axios
//- The sendEmail function that makes requests using axios
// Sends the contract payload to the email endpoint; when the
// connectivity check fails the user is notified instead of retrying.
export function sendEmail(contractData) {
  return checkConnetion()
    .then(() => axios.post(emailUrl, contractData))
    .catch(() => {
      // return axios.post(emailUrlOffline, contractData)
      userLog('Cannot connect to server, check your internet settings.', 'Connection Error', 'error')
    })
}

Download file from Amazon S3 using REST API

I have my own REST API to call in order to download a file. (At the end, the file could be store in different kind of server... Amazon s3, locally etc...)
To get a file from s3, I should use this method:
var url = s3.getSignedUrl('getObject', params);
This will give me a downloadable link to call.
Now, my question is, how can I use my own rest API to download a file when it comes from that link? Is there a way to redirect the call?
I'm using Hapi for my REST server.
// Hapi route stub: will eventually return the S3 file to the caller.
{
method: "GET", path: "/downloadFile",
// auth disabled — this download endpoint is public
config: {auth: false},
handler: function (request, reply) {
// TODO
reply({})
}
},
Instead of using a redirect to download the desired file, just return back an unbufferedStream instead from S3. An unbufferedStream can be returned from the HttpResponse within the AWS-SDK. This means there is no need to download the file from S3, then read it in, and then have the requester download the file.
FYI I use this getObject() approach with Express and have never used Hapi, however I think that I'm pretty close with the route definition but hopefully it will capture the essence of what I'm trying to achieve.
Hapi.js route
const getObject = require('./getObject');
{
method: "GET", path: "/downloadFile",
config: {auth: false},
handler: function (request, reply) {
let key = ''; // get key from request
let bucket = ''; // get bucket from request
return getObject(bucket, key)
.then((response) => {
reply.statusCode(response.statusCode);
response.headers.forEach((header) => {
reply.header(header, response.headers[header]);
});
return reply(response.readStream);
})
.catch((err) => {
// handle err
reply.statusCode(500);
return reply('error');
});
}
},
getObject.js
const AWS = require('aws-sdk');
const S3 = new AWS.S3(<your-S3-config>);
module.exports = function getObject(bucket, key) {
return new Promise((resolve, reject) => {
// Get the file from the bucket
S3.getObject({
Bucket: bucket,
Key: key
})
.on('error', (err) => {
return reject(err);
})
.on('httpHeaders', (statusCode, headers, response) => {
// If the Key was found inside Bucket, prepare a response object
if (statusCode === 200) {
let responseObject = {
statusCode: statusCode,
headers: {
'Content-Disposition': 'attachment; filename=' + key
}
};
if (headers['content-type'])
responseObject.headers['Content-Type'] = headers['content-type'];
if (headers['content-length'])
responseObject.headers['Content-Length'] = headers['content-length'];
responseObject.readStream = response.httpResponse.createUnbufferedStream();
return resolve(responseObject);
}
})
.send();
});
}
Return a HTTP 303 Redirect with the Location header set to the blob's public URL in the S3 bucket.
If your bucket is private then you need to proxy the request instead of performing a redirect, unless your clients also have access to the bucket.

Categories

Resources