Firebase Cloud Function Too Slow - javascript

I have a cloud function that receives the uid of an image, validates its dimensions, generates a thumbnail, and associates the image with the user who called the function. It looks simple, but I have to wait around 40 seconds to see the results, and sometimes it gets congested or something and I have to call the function again to see previous results.
Has anyone experienced this before? How can I fix it?
exports.validateImageDimensions = functions
  .region("us-central1")
  .runWith({ memory: "2GB", timeoutSeconds: 120 })
  .https.onCall(async (data, context) => {
As you can see in the function's metrics, the CPU usage is high...
Thanks.
UPDATE
Code of the function:
exports.validateImageDimensions = functions
  .region("us-central1")
  .runWith({ memory: "2GB", timeoutSeconds: 120 })
  .https.onCall(async (data, context) => {
    // Libraries
    const admin = require("firebase-admin");
    const sizeOf = require("image-size");
    const url = require("url");
    const https = require("https");
    const sharp = require("sharp");
    const path = require("path");
    const os = require("os");
    const fs = require("fs");

    // Lazy initialization of the Admin SDK (flag assumed to be declared at module scope)
    if (!is_validateImageDimensions_initialized) {
      admin.initializeApp();
      is_validateImageDimensions_initialized = true;
    }

    // Create Storage
    const storage = admin.storage();

    // Create Firestore
    const firestore = admin.firestore();

    // Get the image's owner
    const owner = context.auth.token.uid;

    // Get the image's info
    const { id, description, location, tags } = data;

    // Photos bucket
    const bucket = storage.bucket("bucket");

    // File path
    const filePath = `photos/${id}`;

    // Get the file
    const file = getFile(filePath);

    // Check if the file is a jpeg image
    const metadata = await file.getMetadata();
    const isJpgImage = metadata[0].contentType === "image/jpeg";

    // Get the file's url
    const fileUrl = await getUrl(file);

    // Get the photo dimensions using the `image-size` library
    https.get(url.parse(fileUrl), (response) => {
      let chunks = [];
      response
        .on("data", (chunk) => {
          chunks.push(chunk);
        })
        .on("end", async () => {
          // Check if the image has valid dimensions
          let dimensions = sizeOf(Buffer.concat(chunks));

          // Create the associated Firestore document for valid images
          if (isJpgImage && hasValidDimensions(dimensions)) {
            // Create a thumbnail for the uploaded image
            const thumbnailPath = await generateThumbnail(filePath);

            // Get the thumbnail
            const thumbnail = getFile(thumbnailPath);

            // Get the thumbnail's url
            const thumbnailUrl = await getUrl(thumbnail);

            try {
              await firestore
                .collection("posts")
                .doc(owner)
                .collection("userPosts")
                .add({
                  id,
                  uri: fileUrl,
                  thumbnailUri: thumbnailUrl, // Useful for progress images
                  description,
                  location,
                  tags,
                  date: admin.firestore.FieldValue.serverTimestamp(),
                  likes: [], // When a post is first created, no users have liked it
                  comments: [], // Also, there aren't any comments yet
                  width: dimensions.width,
                  height: dimensions.height,
                });
              // TODO: Analytics posts counter
            } catch (err) {
              console.error(
                `Error creating the document in 'posts/{owner}/userPosts/' where 'id === ${id}': ${err}`
              );
            }
          } else {
            // Remove files that are not jpeg images, or whose dimensions are not valid
            try {
              await file.delete();
              console.log(
                `The image '${id}' has been deleted because it has invalid dimensions.
                This may be an attempt by the user '${owner}' to break the security of the app`
              );
            } catch (err) {
              console.error(`Error deleting invalid file '${id}': ${err}`);
            }
          }
        });
    });

    /* ---------------- AUXILIARY FUNCTIONS ---------------- */

    function getFile(filePath) {
      /* Get a file from the storage bucket */
      return bucket.file(filePath);
    }

    async function getUrl(file) {
      /* Get the public url of a file */
      const signedUrls = await file.getSignedUrl({
        action: "read",
        expires: "01-01-2100",
      });
      // signedUrls[0] contains the file's public URL
      return signedUrls[0];
    }

    function hasValidDimensions(dimensions) {
      // Posts' valid dimensions
      const validDimensions = [
        { width: 1080, height: 1080 },
        { width: 1080, height: 1350 },
        { width: 1080, height: 750 },
      ];
      return (
        validDimensions.find(
          ({ width, height }) =>
            width === dimensions.width && height === dimensions.height
        ) !== undefined
      );
    }

    async function generateThumbnail(filePath) {
      /* Generate a thumbnail for the progressive images */
      // Download the file from the bucket
      const fileName = filePath.split("/").pop();
      const tempFilePath = path.join(os.tmpdir(), fileName);
      const thumbnailPath = await bucket
        .file(filePath)
        .download({
          destination: tempFilePath,
        })
        .then(() => {
          // Generate a thumbnail using Sharp
          const size = 50;
          const newFileName = `${fileName}_${size}_thumb.jpg`;
          const newFilePath = `thumbnails/${newFileName}`;
          const newFileTemp = path.join(os.tmpdir(), newFileName);
          sharp(tempFilePath)
            .resize(size, null)
            .toFile(newFileTemp, async (_err, info) => {
              // Upload the thumbnail
              await bucket.upload(newFileTemp, {
                destination: newFilePath,
              });
              // Once the thumbnail has been uploaded, delete the temporary file to free up disk space
              fs.unlinkSync(tempFilePath);
            });
          // Return the thumbnail's path
          return newFilePath;
        });
      return thumbnailPath;
    }
  });
PS: In the console I can read this log entry:
"Function execution took 103 ms, finished with status code: 200"
but, as I said before, I have to wait around 40 seconds to see the new doc in my Firestore.

You're not dealing with promises correctly. A callable function must return a promise that:
Resolves when all of the async work is complete
Resolves with the data to send back to the client
Right now, your function returns nothing, so it returns to the caller immediately, and the future of the async work that you kicked off is uncertain.
Note that https.get() is asynchronous and returns immediately, before its callback is invoked. You will need to find a way to instead return a promise that resolves when all of the callback's work is complete. (Consider that there are other HTTP client libraries that make it easier to get a promise instead of having to deal with callbacks.)
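For illustration, a minimal sketch of that idea, wrapping the download in a promise so the callable can await it and resolve with data at the end (names such as https, sizeOf and fileUrl follow the question's code; this is not the full function):
function downloadBuffer(fileUrl) {
  // Wrap https.get() in a promise so the caller can await the full body.
  return new Promise((resolve, reject) => {
    https.get(fileUrl, (response) => {
      const chunks = [];
      response.on("data", (chunk) => chunks.push(chunk));
      response.on("end", () => resolve(Buffer.concat(chunks)));
      response.on("error", reject);
    }).on("error", reject);
  });
}

// Inside the onCall handler:
// const buffer = await downloadBuffer(fileUrl);
// const dimensions = sizeOf(buffer);
// ... create the Firestore document, then:
// return { id }; // resolve with the data to send back to the client
The same idea applies inside generateThumbnail(): sharp's .toFile() returns a promise when called without a callback, so you can await it and then await bucket.upload() sequentially instead of racing the function's return.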

Related

Is it possible to append to an existing file with Chrome's File System Access API

Using the new File System Access API it is possible to read and write files and folders on the user's device:
const newHandle = await window.showSaveFilePicker();
const writableStream = await newHandle.createWritable();
await writableStream.write("Hello World")
await writableStream.close();
The above code will write "Hello World" to the chosen file. If the file already exists, it will be truncated and overwritten with the new content.
Is it possible to instead append to an existing file without reading the whole file and writing it again? A good example could be writing to a log file.
Joe Duncan's answer is correct (and thanks for the tip!), but you need to open the writable file in 'append' mode.
I needed to close/flush the file after every line is written, and then re-open, seek, and append to that file (with only one click of the SaveFile button):
import {} from "wicg-file-system-access"
/**
* Supply a button-id in HTML, when user clicks the file is opened for write-append.
*
* Other code can: new LogWriter().writeLine("First line...")
* to queue writes before user clicks the button.
*
* file is flushed/closed & re-opened after every writeLine.
* (so log is already saved if browser crashes...)
*/
export class LogWriter {
fileHandle: FileSystemFileHandle;
writeablePromise: Promise<FileSystemWritableFileStream> = this.newWriterPromise;
writerReady: (value: FileSystemWritableFileStream | PromiseLike<FileSystemWritableFileStream>) => void
writerFailed: (reason?: any) => void
contents: string
get newOpenPromise () { return new Promise<FileSystemWritableFileStream>((fil, rej)=>{
this.writerReady = fil;
this.writerFailed = rej
})}
constructor(name = 'logFile', public buttonId = "fsOpenFileButton") {
const options = {
id: 'logWriter',
startIn: 'downloads', // documents, desktop, music, pictures, videos
suggestedName: name,
types: [{
description: 'Text Files',
accept: { 'text/plain': ['.txt'], },
}, ],
};
this.setButton('showSaveFilePicker', options, (value) => {
this.fileHandle = value as FileSystemFileHandle
this.openWriteable()
})
}
async openWriteable(fileHandle: FileSystemFileHandle = this.fileHandle,
options: FileSystemCreateWritableOptions = { keepExistingData: true }) {
let writeable = await fileHandle.createWritable(options)
let offset = (await fileHandle.getFile()).size
writeable.seek(offset)
this.writerReady(writeable)
}
async writeLine(text: string) {
try {
let line = `${text}\n`
let stream = (await this.openPromise) // indicates writeable is ready
await stream.seek((await this.fileHandle.getFile()).size)
await stream.write({type: 'write', data: line});
let closePromise = this.closeFile() // flush to real-file
this.openPromise = this.newOpenPromise // new Promise for next cycle:
await closePromise
while (!this.openPromise.value) await this.openWriteable()
} catch (err) {
console.warn(stime(this, `.writeLine failed:`), err)
throw err
}
}
async closeFile() {
try {
return (await this.writeablePromise).close();
} catch (err) {
console.warn(stime(this, `.closeFile failed:`), err)
throw err
}
}
/** multi-purpose picker button: (callback arg-type changes) */
setButton(method: 'showOpenFilePicker' | 'showSaveFilePicker' | 'showDirectoryPicker',
options: OpenFilePickerOptions & { multiple?: false; } & SaveFilePickerOptions & DirectoryPickerOptions,
cb: (fileHandleAry: any) => void) {
const picker = window[method] // showSaveFilePicker showDirectoryPicker
const fsOpenButton = document.getElementById(this.buttonId)
fsOpenButton.innerText = method.substring(4, method.length - 6)
fsOpenButton.onclick = () => {
picker(options).then((value: any) => cb(value), (rej: any) => {
console.warn(`showOpenFilePicker failed: `, rej)
});
}
return fsOpenButton
}
}
You can get the current file size and set the writing position to the end:
const newHandle = await window.showSaveFilePicker();
const writableStream = await newHandle.createWritable({ keepExistingData: true });
// Get the current file size.
const size = (await newHandle.getFile()).size;
await writableStream.write({
  type: "write",
  data: "Hello World",
  position: size // Set the position to the current file size.
});
await writableStream.close();
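Combining the two answers, a minimal append helper could look like this sketch (assuming the target browser supports keepExistingData; appendLine is a hypothetical name):
// Sketch: append one line to a user-chosen file without rewriting it.
async function appendLine(handle, text) {
  // Keep the existing bytes instead of starting from an empty swap file.
  const writable = await handle.createWritable({ keepExistingData: true });
  const size = (await handle.getFile()).size;
  await writable.write({ type: "write", data: text + "\n", position: size });
  await writable.close(); // flush the swap file into the real file
}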
More info:
https://developer.mozilla.org/en-US/docs/Web/API/FileSystemWritableFileStream/write
https://developer.mozilla.org/en-US/docs/Web/API/IDBMutableFile/getFile

Upload .vhd as Page-Blob to azure-blob-storage from Url

I have a bunch of VHD files stored on a private server, which are accessible through a URL.
I am trying to upload these VHD files directly to my Azure storage account using the Azure JavaScript npm libraries. The VHDs have to be uploaded as page blobs. I tried using the uploadPagesFromURL() method of the PageBlobClient, but with no success. My code looks roughly like this:
async function uploadVHD(accessToken, srcUrl) {
  try {
    // Get credentials from accessToken
    const creds = new StorageSharedKeyCredential(storageAccount.name, storageAccount.key);
    // Get blobServiceClient
    const blobServiceClient = new BlobServiceClient(`https://${storageAccount.name}.blob.core.windows.net`, creds);
    // Create container
    const containerClient = blobServiceClient.getContainerClient("vhd-images");
    await containerClient.createIfNotExists();
    const src = srcUrl.replace('https://', 'https://username:password@');
    // Upload to blob storage
    const pageBlobClient = containerClient.getPageBlobClient("Test.vhd");
    // Get fileSize of vhd
    const fileSize = (await axiosRequest(src, { method: "HEAD" })).headers["content-length"];
    const uploadResponse = await pageBlobClient.uploadPagesFromURL(src, 0, 0, fileSize);
    return uploadResponse;
  } catch (error) {
    return error;
  }
}
It is not possible to upload the page blob directly from your URL. You need to read the data from the URL, then upload it using the uploadPages method.
axios.get(URL, {
    responseType: 'arraybuffer'
  })
  .then((response) => {
    console.log(response.data);
    console.log(response.data.length);
    // upload page blob...
  }).catch((error) => {
    // handle error
  });

// uploadPages method
const uploadResponse = pageBlobClient.uploadPages(data, 0, dataLength);
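Putting the pieces together, a sketch of the whole flow might look like this (uploadVhdAsPageBlob is a hypothetical helper; note that the page blob's total size and each written range must be 512-byte aligned, which fixed-format VHD files already are):
// Sketch: download the VHD into memory, then create and fill the page blob.
const axios = require("axios");

async function uploadVhdAsPageBlob(containerClient, srcUrl, blobName) {
  const response = await axios.get(srcUrl, { responseType: "arraybuffer" });
  const data = Buffer.from(response.data);

  const pageBlobClient = containerClient.getPageBlobClient(blobName);
  await pageBlobClient.create(data.length); // total size, must be a multiple of 512

  // Upload in 4 MiB chunks (the service limits the size of each uploadPages call).
  const chunkSize = 4 * 1024 * 1024;
  for (let offset = 0; offset < data.length; offset += chunkSize) {
    const chunk = data.subarray(offset, Math.min(offset + chunkSize, data.length));
    await pageBlobClient.uploadPages(chunk, offset, chunk.length);
  }
}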

How to build a readable stream in Node.js and TypeScript?

I connected a TypeScript function to Azure Blob Storage through the REST API, and this works fine for me. Now I want to get each blob's contents and read them.
I tried this code, but it returns an error:
const blobServiceClient = new BlobServiceClient(`https://${accountName}.blob.core.windows.net?${sasToken}`,
  pipeline);
const containerClient = blobServiceClient.getContainerClient(containerName);
console.log(containerClient);
if (!containerClient.exists()) {
  console.log("the container does not exist");
  await containerClient.create();
}
const client = containerClient.getBlockBlobClient(this.currentFile.name);
// name of uploaded blob
console.log(this.currentFile.name);
// metadata from the blob
console.log(client);
// List each blob in the container
for await (const blob of containerClient.listBlobsFlat()) {
  console.log('\t', blob.name);
  const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
  const downloadBlockBlobResponse = await blockBlobClient.download(0);
  console.log('\nDownloaded blob content...');
  console.log('\t', await streamToString(downloadBlockBlobResponse.readableStreamBody));
  // end of loop
}
async function streamToString(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on("data", (data) => {
      chunks.push(data.toString());
    });
    readableStream.on("end", () => {
      resolve(chunks.join(""));
    });
    readableStream.on("error", reject);
  });
}
The error is :
ERROR Error: Uncaught (in promise): TypeError: Cannot read property 'on' of undefined TypeError: Cannot read property 'on' of undefined
So how can I solve this problem?
Thanks
Download the official sample code.
It runs normally on my side. Check whether your local environment lacks dependencies, or whether the permissions on the storage account need to be set.
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

/*
 Setup: Enter your storage account name and shared key in main()
*/

import {
  BlobServiceClient,
  StorageSharedKeyCredential,
  BlobDownloadResponseModel
} from "@azure/storage-blob";

// Load the .env file if it exists
import * as dotenv from "dotenv";
dotenv.config();

export async function main() {
  // Enter your storage account name and shared key
  const account = process.env.ACCOUNT_NAME || "pans*****age";
  const accountKey = process.env.ACCOUNT_KEY || "IHa48xxo+0anyKQ2GzQ2K*******ZBxgJ0VotCpGs/PMftkebb9UFqyg==";

  // Use StorageSharedKeyCredential with storage account and account key
  // StorageSharedKeyCredential is only available in Node.js runtime, not in browsers
  const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);

  // ONLY AVAILABLE IN NODE.JS RUNTIME
  // DefaultAzureCredential will first look for Azure Active Directory (AAD)
  // client secret credentials in the following environment variables:
  //
  // - AZURE_TENANT_ID: The ID of your AAD tenant
  // - AZURE_CLIENT_ID: The ID of your AAD app registration (client)
  // - AZURE_CLIENT_SECRET: The client secret for your AAD app registration
  //
  // If those environment variables aren't found and your application is deployed
  // to an Azure VM or App Service instance, the managed service identity endpoint
  // will be used as a fallback authentication source.
  // const defaultAzureCredential = new DefaultAzureCredential();

  // You can find more TokenCredential implementations in the [@azure/identity](https://www.npmjs.com/package/@azure/identity) library
  // to use client secrets, certificates, or managed identities for authentication.

  // Use AnonymousCredential when url already includes a SAS signature
  // const anonymousCredential = new AnonymousCredential();

  // List containers
  const blobServiceClient = new BlobServiceClient(
    // When using AnonymousCredential, following url should include a valid SAS or support public access
    `https://${account}.blob.core.windows.net`,
    sharedKeyCredential
  );

  let i = 1;
  for await (const container of blobServiceClient.listContainers()) {
    console.log(`Container ${i++}: ${container.name}`);
  }

  // Create a container
  const containerName = `newcontainer${new Date().getTime()}`;
  const containerClient = blobServiceClient.getContainerClient(containerName);
  const createContainerResponse = await containerClient.create();
  console.log(`Create container ${containerName} successfully`, createContainerResponse.requestId);

  // Create a blob
  const content = "hello, 你好";
  const blobName = "newblob" + new Date().getTime();
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  const uploadBlobResponse = await blockBlobClient.upload(content, Buffer.byteLength(content));
  console.log(`Upload block blob ${blobName} successfully`, uploadBlobResponse.requestId);

  // List blobs
  i = 1;
  for await (const blob of containerClient.listBlobsFlat()) {
    console.log(`Blob ${i++}: ${blob.name}`);
  }

  // Get blob content from position 0 to the end
  // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody
  // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody
  const downloadBlockBlobResponse: BlobDownloadResponseModel = await blockBlobClient.download(0);
  console.log(
    "Downloaded blob content",
    await streamToString(downloadBlockBlobResponse.readableStreamBody!)
  );

  // Delete container
  await containerClient.delete();
  console.log("deleted container");
}

// A helper method used to read a Node.js readable stream into string
async function streamToString(readableStream: NodeJS.ReadableStream) {
  return new Promise((resolve, reject) => {
    const chunks: string[] = [];
    readableStream.on("data", (data) => {
      chunks.push(data.toString());
    });
    readableStream.on("end", () => {
      resolve(chunks.join(""));
    });
    readableStream.on("error", reject);
  });
}

main().catch((err) => {
  console.error("Error running sample:", err.message);
});
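Note the comment in the sample: in browsers, readableStreamBody is undefined and blobBody must be used instead, which would explain the "Cannot read property 'on' of undefined" error if your code runs in a browser. A sketch of the browser-side equivalent, assuming the same blockBlobClient:
// Browser-side sketch: readableStreamBody is undefined in browsers,
// so read the blob body instead (blobBody is a Promise<Blob>).
const downloadResponse = await blockBlobClient.download(0);
const body: Blob = await downloadResponse.blobBody!;
console.log("Downloaded blob content", await body.text());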

Expand my image upload from 1 to 5 photos; map/foreach?

I am creating an app in which you can upload a photo, with some other data, to Firebase. The uploading part worked perfectly with one picture. However, I have now added a multiple-image picker (select 1 to 5 pictures) and would like my image upload function to upload all five pictures instead of one.
The image upload works with one image provided, so how can I rearrange my code to upload an arbitrary number of photos from the array?
The pictures are added to the photos array with the following data (the output shown below is a console.log of the fetched images):
Array [
  Object {
    "exists": true,
    "file": "ph://8905951D-1D94-483A-8864-BBFDC4FAD202/L0/001",
    "isDirectory": false,
    "md5": "f9ebcab5aa0706847235887c1a7e4740",
    "modificationTime": 1574493667.505371,
    "size": 104533,
    "uri": "ph://8905951D-1D94-483A-8864-BBFDC4FAD202/L0/001",
  },
With this didFocus listener I check whether the fetchedImages param is set, and set the photos array to the fetched images (all the data shown above):
const didFocusSubscription = props.navigation.addListener(
  'didFocus', () => {
    let fetchedImages = props.navigation.getParam('fetchedImages');
    console.log(fetchedImages);
    setPhotos(fetchedImages);
    setImageValid(true);
    calculateImageDimensions();
  }
);
When I save the page and start dispatching the data, the following call runs; the uploadImageAsync function returns an upload URL, which is then saved by the dispatch function to the Firebase Database, to be fetched later:
uploadurl = await uploadImageAsync(photos)
So uploadImageAsync starts with the photos array passed in. How can I make sure the function below runs for every photo.uri in the array? Can I use .map or forEach for this, and in what context should I use them?
Also, I am not quite sure how I can send back an array of URLs to be saved together with the rest of the information.
async function uploadImageAsync(photos) {
  console.log('uploadImage has started');
  // Why are we using XMLHttpRequest? See:
  // https://github.com/expo/expo/issues/2402#issuecomment-443726662
  const blob = await new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.onload = function () {
      resolve(xhr.response);
    };
    xhr.onerror = function (e) {
      console.log(e);
      reject(new TypeError('Network request failed'));
    };
    xhr.responseType = 'blob';
    xhr.open('GET', photos, true);
    xhr.send(null);
  });

  const ref = firebase
    .storage()
    .ref()
    .child(uuid.v4());
  const snapshot = await ref.put(blob);

  // We're done with the blob, close and release it
  blob.close();

  return await snapshot.ref.getDownloadURL();
}
============== edited because of progress with uploading ==============
Once again I am a little bit further along. The image upload function is now running, but because there are multiple images I would like to await the response of all the images before continuing.
try {
  uploadurl = await uploadImageAsync();
  address = await getAddress(selectedLocation);
  console.log(uploadurl);
  if (!uploadurl.length) {
    Alert.alert('Upload error', 'Something went wrong uploading the photo, please try again', [
      { text: 'Okay' }
    ]);
    return;
  }
  dispatch(
At this moment, when I start the uploadImageAsync function, I can see with the help of console.log that it uploads the images; they also show up online. But while the pictures are still uploading, the upload URL already comes back empty, shows the Alert, and stops the function.
uploadImageAsync = async () => {
  const provider = firebase.database().ref(`providers/${uid}`);
  let imagesArray = [];
  try {
    await photos.map(img => {
      let file = img.data;
      const path = "Img_" + uuid.v4();
      const ref = firebase
        .storage()
        .ref(`/${uid}/${path}`);
      ref.putString(file).then(() => {
        ref
          .getDownloadURL()
          .then(images => {
            imagesArray.push({
              uri: images
            });
            console.log("Out-imgArray", imagesArray);
          });
      });
    });
    return imagesArray; // <== this return imagesArray is fired too early and starts the rest of my upload function
  } catch (e) {
    console.error(e);
  }
};
A Discord chat pointed me toward Promise.all to get this to work. I tried that, and opened another Stack Overflow topic to get it working:
await response of image upload before continue function
The solution for my image upload function is in the topic above:
uploadImages = () => {
  const provider = firebase.database().ref(`providers/${uid}`);
  // CHANGED: removed 'let imagesArray = [];', no longer needed
  return Promise.all(photos) // CHANGED: return the promise chain
    .then(photoarray => {
      console.log('all responses are resolved successfully');
      // take each photo, upload it and then return its download URL
      return Promise.all(photoarray.map((photo) => { // CHANGED: used Promise.all(someArray.map(...)) idiom
        let file = photo.data;
        const path = "Img_" + uuid.v4();
        const storageRef = firebase // CHANGED: renamed 'ref' to 'storageRef'
          .storage()
          .ref(`/${uid}/${path}`);
        let metadata = {
          contentType: 'image/jpeg',
        };
        // upload current photo and get its download URL
        return storageRef.putString(file, 'base64', metadata) // CHANGED: return the promise chain
          .then(() => {
            console.log(`${path} was uploaded successfully.`);
            return storageRef.getDownloadURL() // CHANGED: return the promise chain
              .then(fileUrl => ({ uri: fileUrl }));
          });
      }));
    })
    .then((imagesArray) => {                          // These lines can
      console.log("Out-imgArray: ", imagesArray);     // safely be removed.
      return imagesArray;                             // They are just
    })                                                // for logging.
    .catch((err) => {
      console.error(err);
    });
};
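For completeness, a sketch of how the caller could then await the whole batch before dispatching (uploadurl, Alert and dispatch are the names from the question's code):
// Sketch: wait for every upload to finish before continuing.
const uploadurl = await uploadImages(); // resolves to an array of { uri } objects
if (!uploadurl || !uploadurl.length) {
  Alert.alert('Upload error', 'Something went wrong uploading the photos, please try again', [
    { text: 'Okay' }
  ]);
  return;
}
dispatch(/* ...save uploadurl together with the rest of the post data... */);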

Permission denied when creating a storage trigger with a named storage bucket

Insufficient permissions to (re)configure a trigger (permission denied for bucket images). Please, give owner permissions to the editor role of the bucket and try again.
I am pretty new to Firebase. I am trying to implement the following: when I upload an image, it is resized and its name is changed.
The bucket in which I want to have the resized images is named "images".
Why does it complain about permissions if I have access to read and write?
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write;
    }
  }
}
const functions = require("firebase-functions");
const gcs = require("@google-cloud/storage")();
const os = require("os");
const path = require("path");
const spawn = require("child-process-promise").spawn;

// // Create and Deploy Your First Cloud Functions
// // https://firebase.google.com/docs/functions/write-firebase-functions
//
exports.onFileChange = functions.storage
  .bucket("images")
  .object()
  .onFinalize(event => {
    const bucket = event.bucket;
    const contentType = event.contentType;
    const filePath = event.name;
    console.log("File change detected, function execution started");

    if (object.resourceState === "not_exists") {
      console.log("We deleted a file, exit...");
      return;
    }

    if (path.basename(filePath).startsWith("resized-")) {
      console.log("We already renamed that file!");
      return;
    }

    const destBucket = gcs.bucket(bucket);
    const tmpFilePath = path.join(os.tmpdir(), path.basename(filePath));
    const metadata = { contentType: contentType };

    return destBucket
      .file(filePath)
      .download({
        destination: tmpFilePath
      })
      .then(() => {
        return spawn("convert", [
          tmpFilePath,
          "-resize",
          "500x500",
          tmpFilePath
        ]);
      })
      .then(() => {
        return destBucket.upload(tmpFilePath, {
          destination: "resized-" + path.basename(filePath),
          metadata: metadata
        });
      });
  });
What you're describing is a folder inside a bucket, not a bucket. What you likely want to do instead is trigger on the default bucket (.object() without naming a bucket) in your trigger definition and then, at the top of your function, do something like:
// Exit if this is triggered on a file that is not an image.
if (!filePath.startsWith('images/')) {
  console.log('This is not in the images directory.');
  return null;
}
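Put together, the corrected trigger definition could look like this sketch (keeping the names from the question's code):
// Sketch: trigger on the default bucket, then filter by path prefix.
exports.onFileChange = functions.storage
  .object() // default bucket; a named bucket needs extra IAM permissions
  .onFinalize(object => {
    const filePath = object.name;
    // Exit if this is triggered on a file that is not in the images folder.
    if (!filePath.startsWith('images/')) {
      console.log('This is not in the images directory.');
      return null;
    }
    // ... resize logic as before ...
    return null;
  });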
