This is my Cloud Function that is supposed to generate a watermarked image and store it in Firebase Storage every time an image is uploaded.
// Cloud Function (Storage onFinalize): watermarks newly uploaded pet images
// with 'logo/cbs.png' using ImageMagick's `convert`, then re-uploads the
// result into the same directory with a 'watermark_' filename prefix.
exports.generateWatermark = functions.storage
.object()
.onFinalize(async object => {
try {
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
// Exit if this is triggered on a file that is not an image.
if (!contentType.startsWith('image/')) {
return console.log('This is not an image.');
}
// Get the file name.
const fileName = path.basename(filePath);
// Exit if the image is already a watermarked image (prevents retrigger loops).
if (fileName.startsWith('watermark_')) {
return console.log('Already a Watermarked image.');
}
// Only process objects under the 'pets' prefix.
if (!filePath.startsWith('pets')) {
return console.log('Not a pet image: ', filePath);
}
// Download file from bucket.
const bucket = admin.storage().bucket(fileBucket);
const tempFilePath = path.join(os.tmpdir(), fileName);
const tempWatermarkPath = path.join(os.tmpdir(), 'watermark.png');
const metadata = {
contentType: contentType,
};
// Generate a watermarked image using ImageMagick ('convert'), not Jimp.
await bucket.file(filePath).download({destination: tempFilePath});
await bucket
.file('logo/cbs.png')
.download({destination: tempWatermarkPath});
console.log('Image downloaded locally to', tempFilePath, filePath);
// NOTE(review): if `spawn` here is child_process.spawn, it returns a
// ChildProcess (an EventEmitter), not a Promise — this `await` resolves
// immediately and failures never reach the catch block below. Also, the
// double quotes embedded in the '-draw' template literal are passed
// verbatim to ImageMagick (spawn does no shell parsing), which likely
// breaks the draw command itself.
await spawn('convert', [
tempFilePath,
'-gravity',
'NorthWest',
'-draw',
`"image Over 10,10,200,200 ${tempWatermarkPath}"`,
tempFilePath,
]);
console.log('Watermarked image created at', tempFilePath);
// We add a 'watermark_' prefix
const watermarkFileName = `watermark_${fileName}`;
const watermarkFilePath = path.join(
path.dirname(filePath),
watermarkFileName,
);
// Uploading the watermarked image.
await bucket.upload(tempFilePath, {
destination: watermarkFilePath,
metadata: metadata,
});
// Once the watermarked image has been uploaded delete the local file to free up disk space.
fs.unlinkSync(tempFilePath);
return fs.unlinkSync(tempWatermarkPath);
} catch (err) {
console.log('GENERATE WATERMARK ERROR: ', err);
throw err;
}
});
The part of the code that errors out is the imagemagick part:
// Quoted excerpt of the failing call: child_process.spawn returns a
// ChildProcess (an EventEmitter), not a Promise, so this `await` resolves
// immediately and the error never reaches the surrounding catch block.
await spawn('convert', [
tempFilePath,
'-gravity',
'NorthWest',
'-draw',
`"image Over 10,10,200,200 ${tempWatermarkPath}"`,
tempFilePath,
]);
This is the error that I'm getting:
Is there a way I could get more info about the error? The error is not even reaching my catch block.
child_process.spawn uses the observer pattern.
The return value from invoking child_process.spawn is a ChildProcess object whose stdout and stderr are EventEmitters.
You'll need an extra step to promisify the existing interface before you can await it. For example,
// Promisified wrapper around child_process.spawn.
// Resolves with the accumulated stdout text when the process exits with
// code 0; rejects with an Error carrying the accumulated stderr/error text
// otherwise — so `await spawn(...)` now reaches the caller's catch block.
const spawn = (command, args) => new Promise((resolve, reject) => {
  const cp = require('child_process').spawn(command, args);
  // Start from '' so the output is not prefixed with the string "null".
  let err = '';
  let out = '';
  cp.stdout.on('data', data => out += data.toString());
  // Capture the child's stderr stream ('data' on stderr, not 'error' on stdout).
  cp.stderr.on('data', data => err += data.toString());
  // 'error' on the process itself fires e.g. when the binary cannot be spawned.
  cp.on('error', e => err += e.toString());
  cp.on('close', code => {
    code === 0 ? resolve(out) : reject(new Error(err || `exit code ${code}`));
  });
});
child_process.execFile, on the other hand, uses callbacks. This makes it easy to promisify with the util.promisify function. For example:
const util = require('util');
// execFile uses the Node (err, {stdout, stderr}) callback convention, so
// util.promisify turns it into an awaitable function whose rejection carries
// the child's failure — it WILL reach the catch block below.
const execFile = util.promisify(require('child_process').execFile);
// Abbreviated example: the surrounding setup (tempFilePath, etc.) is elided.
exports.generateWatermark = functions.storage
.object()
.onFinalize(async object => {
try {
//...
// NOTE(review): the embedded double quotes in the '-draw' value are still
// passed literally to ImageMagick (no shell is involved) — verify they are
// intended before relying on this example.
await execFile('convert', [
tempFilePath,
'-gravity',
'NorthWest',
'-draw',
`"image Over 10,10,200,200 ${tempWatermarkPath}"`,
tempFilePath,
]);
//...
} catch (err) {
console.log('GENERATE WATERMARK ERROR: ', err);
throw err;
}
});
Related
I tried to resize or compress an image before uploading to the google cloud storage.
The upload works fine but the resizing does not seem to work.
Here is my code:
// Uploads a multer-style in-memory file to GCS and resolves with its public URL.
// NOTE(review): the sharp pipeline's result is discarded — .toBuffer()
// returns a Promise that is never awaited or assigned, and the ORIGINAL
// `buffer` is what gets written at blobStream.end(buffer) below. That is
// why the uploaded image is never resized/re-encoded.
const uploadImage = async (file) => new Promise((resolve, reject) => {
let { originalname, buffer } = file
sharp(buffer)
.resize(1800, 948)
.toFormat("jpeg")
.jpeg({ quality: 80 })
.toBuffer()
// Spaces in the original name are replaced so the object name is URL-friendly.
const blob = bucket.file(originalname.replace(/ /g, "_"))
const blobStream = blob.createWriteStream({
resumable: false
})
blobStream.on('finish', () => {
const publicUrl = format(
`https://storage.googleapis.com/${bucket.name}/${blob.name}`
)
resolve(publicUrl)
}).on('error', () => {
reject(`Unable to upload image, something went wrong`)
})
.end(buffer)
})
I ran into the same issue with a project I was working on. After lots of trial and error I found the following solution. It might not be the most elegant, but it worked for me.
In my upload route function I created a new thumbnail image object with the original file values and passed it as the file parameter to the uploadFile function for google cloud storage.
Inside my upload image route function:
// Builds a second multer-style file object for the thumbnail: same metadata
// as the original upload, but the buffer is the image resized to 150px wide.
// (This must live inside an async route handler — `await` is used below.)
const file = req.file;
const thumbnail = {
fieldname: file.fieldname,
originalname: `thumbnail_${file.originalname}`,
encoding: file.encoding,
mimetype: file.mimetype,
buffer: await sharp(file.buffer).resize({ width: 150 }).toBuffer()
}
const uploadThumbnail = await uploadFile(thumbnail);
My google cloud storage upload file function:
// Streams an in-memory file buffer to Cloud Storage.
// Resolves with { name } (the object name used in the bucket) once the
// write stream finishes; rejects with the stream error on failure.
const uploadFile = async (file) => new Promise((resolve, reject) => {
  const objectName = file.originalname;
  const writeStream = bucket.file(objectName).createWriteStream({
    resumable: false,
    metadata: {
      contentType: file.mimetype
    }
  });
  writeStream
    .on('error', (streamErr) => reject(streamErr))
    .on('finish', () => resolve({ name: objectName }));
  // Write the whole buffer and close the stream in one call.
  writeStream.end(file.buffer);
});
I think the problem is with toFormat(). That function does not exist in the Docs. Can you try to remove it and check if it would work?
// Same sharp pipeline without toFormat(): resize, re-encode as 80%-quality
// JPEG, and produce a Buffer. Note .toBuffer() returns a Promise — the
// result must be awaited/assigned to be of any use.
sharp(buffer)
.resize(1800, 948)
.jpeg({ quality: 80 })
.toBuffer()
Modify the metadata once you have finished uploading the image.
import * as admin from "firebase-admin";
import * as functions from "firebase-functions";
import { log } from "firebase-functions/logger";
import * as sharp from "sharp";
// Callable function: decodes base64 image data, resizes it to a 500px-wide
// PNG with sharp, and uploads it under img/ in the default bucket.
// The upload stream is wrapped in an awaited Promise so the callable only
// resolves after the write (and the contentType metadata update) completes,
// and stream failures surface to the caller as an HttpsError instead of
// being thrown — and lost — inside an event handler.
export const uploadFile = functions.https.onCall(async (data, context) => {
  const bytes = data.imageData;
  const bucket = admin.storage().bucket();
  const buffer = Buffer.from(bytes, "base64");
  const bufferSharp = await sharp(buffer)
    .png()
    .resize({ width: 500 })
    .toBuffer();
  const nombre = "IMAGE_NAME";
  // Append the extension exactly once — the original appended ".png" to a
  // name that already ended in ".png", producing "….png.png".
  const fileName = `img/${nombre}.png`;
  const fileUpload = bucket.file(fileName);
  const uploadStream = fileUpload.createWriteStream();
  try {
    await new Promise((resolve, reject) => {
      uploadStream.on("error", reject);
      uploadStream.on("finish", resolve);
      uploadStream.end(bufferSharp);
    });
  } catch (err) {
    log("Error uploading image", err);
    throw new functions.https.HttpsError("unknown", "Error uploading image");
  }
  // Modify the metadata once the image upload has finished.
  await fileUpload.setMetadata({ contentType: "image/png" });
  log("Upload success");
});
I want to upload a file to Firebase using JavaScript. The file is on my server and I also have its path, but Firebase doesn't allow uploading a file from a file path, and it shows the following error message:
code_: "storage/invalid-argument"
message_: "Firebase Storage: Invalid argument in `put` at index 0: Expected Blob or File."
name_: "FirebaseError"
serverResponse_: null
How can I upload the file or convert the file path to file object?
You can use this function to upload file in firebase. the uri is the file path which further creates the file type required in firebase.
// Uploads the file at `uri` to Firebase Storage under images/ with a random
// 9-character name (original extension preserved) and returns its download URL.
const uploadImage = async (uri) => {
  const uniqid = () => Math.random().toString(36).substr(2, 9);
  const ext = uri.split('.').pop(); // Extract image extension
  const filename = `${uniqid()}.${ext}`; // Generate unique name
  // Store under the generated name — the original interpolated a literal
  // placeholder ("images/$(unknown)") instead of the computed filename.
  const ref = fb.storage().ref().child(`images/${filename}`);
  // Build the Blob via XMLHttpRequest. NOTE(review): the original also ran a
  // dead `fetch(uri)` whose response was unused (its .blob() call was
  // commented out) — presumably response.blob() failed in this environment;
  // the redundant request has been removed.
  const blob = await new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.onload = function () {
      resolve(xhr.response);
    };
    xhr.onerror = function (e) {
      console.log(e);
      reject(new TypeError('Network request failed'));
    };
    xhr.responseType = 'blob';
    xhr.open('GET', uri, true);
    xhr.send(null);
  });
  const snapshot = await ref.put(blob);
  // Release the platform blob's underlying resources.
  blob.close();
  const imgUrl = await snapshot.ref.getDownloadURL();
  return imgUrl;
}
after this function you can call this function to upload your Image in firebase database like this,
// Form submit handler: uploads the selected image, then writes the user's
// display name and the uploaded image URL onto the Firebase auth profile.
const formSubmit= async(values, action)=>{
console.log(values);
try{
// NOTE(review): this promise chain is neither returned nor awaited, so the
// surrounding try/catch never sees rejections — only the inner .catch
// handlers do, and formSubmit resolves before the upload finishes.
uploadImage(image) //this is my image upload code
.then( async(res) => {
setLoader(false);
var user = fb.auth().currentUser;
await user.updateProfile({
displayName: values.name,
photoURL: res // use the response url to store in mapped data table
}).then(function(res) {
console.log(res)
}).catch(function(error) {
console.log(error)
});
})
.catch(error => {
console.log('it does not work')
console.error(error)
})
}catch(error){
console.log(error)
}
}
I want to upload file to mongodb with graphql resolver.
In server.js I have this help function to store file, which is exported to use it in my resolver.
The function is based on what I found here: https://github.com/jaydenseric/graphql-upload/issues/8, but some things have since changed in graphql — for example, destructuring the file object. I don't know what should be found in the path variable, or how I should use createReadStream (the function destructured from file).
const mongoose = require('mongoose');
const Grid = require('gridfs-stream');
const fs = require('fs');
//...
// Connect to Mongo
// Open the app-wide mongoose connection using the URI from the environment.
// The option flags opt in to the newer parser/index/topology behavior —
// presumably to silence mongoose deprecation warnings (TODO confirm against
// the mongoose version in use).
mongoose
.connect(process.env.mongoURI, {
useNewUrlParser: true,
useCreateIndex: true,
useUnifiedTopology: true,
useFindAndModify: false
}) // Adding new mongo url parser
.then(() => console.log('MongoDB Connected...'))
.catch(err => console.log(err));
// Stores a graphql-upload file into GridFS (bucket 'files').
const storeFile = async (upload) => {
// `upload` resolves to { filename, createReadStream, mimetype }.
// NOTE(review): `.then(result => result)` is redundant — `await upload` suffices.
const { filename, createReadStream, mimetype } = await upload.then(result => result);
const bucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, { bucketName: 'files' });
const uploadStream = bucket.openUploadStream(filename, {
contentType: mimetype
});
// NOTE(review): console.log('error') is CALLED here and its return value
// (undefined) is passed as the listener — this is exactly what produces
// 'The "listener" argument must be of type function. Received undefined'
// and prints 'error'/'finish' immediately. The handlers must be functions,
// and the function should return a promise resolving to uploadStream.id
// (as it stands it resolves with undefined before the pipe finishes).
createReadStream()
.pipe(uploadStream)
.on('error', console.log('error'))
.on('finish', console.log('finish'));
}
module.exports = { storeFile }
//...
My resolver(here it's minimal version, because now I want only to upload file into my database. In one of my tries, it even created fs.files and fs.chunks collections, but without a data):
// Minimal resolver under test: logs the Upload scalar and stores it.
Mutation: {
uploadFile: async (_, { file }) => {
console.log(file);
// NOTE(review): the buggy storeFile above resolves with undefined, so
// fileId is always undefined here (and is otherwise unused).
const fileId = await storeFile(file);
return true;
}
}
I have this error now:
Unhandled Rejection (Error): GraphQL error: The "listener" argument
must be of type function. Received undefined
and in terminal I have printed 'error'(like in pipe.on('error', console.log('error') statement )
And I can upload only small files( max 60 kb), all larger just don't upload, but errors are showing on all tries.
Ok, I managed to solve it.
resolver mutation:
const { storeFile } = require('../../server');
//...
// Working resolver: stores the upload in GridFS and reports success.
// NOTE(review): `.then(result => result)` is a no-op — `await storeFile(file)`
// alone is equivalent.
uploadFile: async (_, { file }) => {
const fileId = await storeFile(file).then(result => result);
return true;
// later I will return something more and create some object etc.
}
supporting function from server.js
// Streams a graphql-upload file into GridFS (bucket 'files') and resolves
// with the stored file's ObjectId; rejects if the read or write stream errors.
const storeFile = async (upload) => {
  // `upload` is a Promise of { filename, createReadStream, mimetype }.
  // (`await upload` replaces the redundant `await upload.then(result => result)`.)
  const { filename, createReadStream, mimetype } = await upload;
  const bucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, { bucketName: 'files' });
  const uploadStream = bucket.openUploadStream(filename, {
    contentType: mimetype
  });
  // Wrap the pipe in a Promise so callers can await completion and receive the id.
  return new Promise((resolve, reject) => {
    createReadStream()
      .pipe(uploadStream)
      .on('error', reject)
      .on('finish', () => {
        resolve(uploadStream.id)
      })
  })
}
module.exports = { storeFile }
I'm trying to create an app where a user can upload a zipped file, the app will unzip the file and save it to disk, and a path to the file will be saved to MongoDB for later retrieval.
I'm having a hard time getting the upload from the form, unzipping, saving to disk, and uploading the path of the unzipped file to the database all in one function. I'm really new to this and am trying to learn about callbacks and such; I can't find any working solution for what I'm trying to do.
This is what my functions currently looks like:
// Multer is a form handling middleware
// Disk storage config: every upload lands in ./uploads/unzip under a
// "<fieldname>-<timestamp><ext>" name.
var storage = multer.diskStorage({
destination: function (req, file, cb) {
console.log(file)
cb(null, './uploads/unzip')
},
filename: function (req, file, cb) {
// Timestamped name avoids collisions while preserving the extension.
cb(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname))
},
})
const upload = multer({ storage }).single('file'); //this is the 1st func in the route
// Middleware: extracts the uploaded zip into uploads/unzipped/ and tries to
// record each entry's path in the DB via postfile().
const unzipp = async (req, res, next) => { //second func in route
try {
const dir = 'uploads/unzipped/';
var stream = fs.createReadStream(req.file.path)
stream.pipe(unzip.Extract({path: dir}))
// NOTE(review): the handler omits its `entry` parameter — it should be
// `function (entry) {...}` — so every `entry.path` access below throws a
// ReferenceError. Also note the handler fires once PER ENTRY, so the
// uploaded zip is unlinked repeatedly.
.on('entry', function () {
var fileName = entry.path;
var type = entry.type;
var size = entry.size;
console.log(fileName, type, size)
if (type.isDirectory) {
// NOTE(review): postfile() (defined below) references req/res/fileName
// that are not in its scope — this call cannot work as written.
postfile() //TRYING TO CALL POSTFILE() HERE
console.log('unzipped and path saved')
} else {
res.error('Failed unzipping')
}
fs.unlink(req.file.path, function (e) {
if (e) throw e;
console.log('successfully deleted '+req.file.path);
});
})
} catch (e) {
console.error(e)
}
// NOTE(review): next() runs immediately, before extraction has finished —
// the stream work above races the rest of the route.
next();
}
//Upload is a mongoDB cluster Schema
// Saves an Upload document whose filePath should point at the unzipped file.
// NOTE(review): this function takes no parameters, yet it reads `req`,
// `res`, and `fileName` — none of which exist in this scope. It must either
// receive them as arguments or be written as (req, res) middleware.
async function postfile () {
try{
let newUpload = new Upload(req.body); //new instance of uplaod based on the model based on req.body
newUpload.title = req.body.title;
newUpload.description = req.body.description;
newUpload.labels = req.body.labels;
newUpload.filePath = fileName; //ASSIGN FILEPATH IN DB SCHEMA TO UNZIPPED FILE PATH
console.log("filePath saved")
newUpload.save()
.then(newUpload => {
res.status(200).json({file: "File added successfully"})
})
.catch(err => {
res.status(400).send('File upload failed to save to DB :(')
})
} catch (e) {
console.error(e);
}
}
As you can see I'm trying to call the function to save the mongo schema in unzipp function. This is the post route in a separate folder:
router.post('/upload', FileCtrl.upload, FileCtrl.unzipp)
I've also tried saving the entry path of the unzipped file as a global var (fileName) and assigning the path in the Schema as fileName, but it doesn't work either:
// Second attempt: extract the zip and stash the entry path in a variable for
// the postfile middleware that runs after this one.
const unzipp = async (req, res, next) => {
try {
const dir = 'uploads/unzipped/';
var stream = fs.createReadStream(req.file.path)
stream.pipe(unzip.Extract({path: dir}))
// NOTE(review): the handler still omits its `entry` parameter, so
// `entry.path` throws. fileName/type/size/result are also assigned
// without declarations, creating implicit globals — which is why postfile
// later sees "fileName is not defined" whenever this handler never ran.
.on('entry', function () {
fileName = entry.path;
type = entry.type;
size = entry.size;
console.log(fileName, type, size)
// if (type.isDirectory) {
// console.log('unzipped and path saved')
// } else {
// res.error('Failed unzipping')
// }
result = {
file: fileName,
message:"File has been extracted"
};
//var file = req.file
fs.unlink(req.file.path, function (e) {
if (e) throw e;
console.log('successfully deleted '+req.file.path);
});
// NOTE(review): fires per entry — multiple res.json calls on one request
// would throw "headers already sent".
res.json(result);
})
} catch (e) {
console.error(e)
}
// NOTE(review): runs before extraction finishes (races the stream above).
next();
}
// Saves an Upload document with metadata from the request body.
// NOTE(review): `fileName` is never declared here — it only exists if the
// unzipp middleware's implicit-global assignment actually ran first, which
// is the source of the reported "ReferenceError: fileName is not defined".
const postfile = async (req, res) => {
try{
console.log("Posting to DB")
let newUpload = new Upload(req.body); //new instance of uplaod based on the model based on req.body
newUpload.title = req.body.title;
newUpload.description = req.body.description;
newUpload.labels = req.body.labels;
newUpload.filePath = fileName;
console.log("Ok so far")
newUpload.save()
.then(newUpload => {
res.status(200).json({file: "File added successfully"})
})
.catch(err => {
res.status(400).send('File upload failed to save to DB :(')
})
} catch (e) {
console.error(e);
}
}
this gives the error " ReferenceError: fileName is not defined "
the new route looks like this:
router.post('/upload', FileCtrl.upload, FileCtrl.unzipp, FileCtrl.postfile)
I've been trying to solve this for a really long time and would really appreciate some advice.
EDIT:
For testing purposes I hardcoded the filepath and it saved to the DB perfectly...
// EDIT version: identical to postfile above except filePath is hard-coded.
// It works only because the path no longer depends on the implicit global
// `fileName` from the unzip middleware — confirming the DB save itself is fine.
const postfile = async (req, res) => {
try{
console.log("Posting to DB")
//var stream = fs.readdirSync('./uploads/unzipped/Nancy_Collins_118226967_v2')
let newUpload = new Upload(req.body); //new instance of uplaod based on the model based on req.body
newUpload.title = req.body.title;
newUpload.description = req.body.description;
newUpload.labels = req.body.labels;
newUpload.filePath = './uploads/unzipped/Nancy_Collins_118226967_v2';
console.log("Ok so far")
newUpload.save()
.then(newUpload => {
res.status(200).json({file: "File added successfully"})
})
.catch(err => {
res.status(400).send('File upload failed to save to DB :(')
})
} catch (e) {
console.error(e);
}
}
Obviously this isn't practical or dynamic, but it's possible.
This is what I am trying to do with Cloud Function.
When some audio file is uploaded on FireStorage it will be converted into mp3 format. After that, I want to get transcript by using CLOUD SPEECH-TO-TEXT.
But I keep getting this error message:
Each then() should return a value or throw
I am not familiar with javascript.
Here is my entire code for cloud function.
'use strict';
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const gcs = require('#google-cloud/storage')();
const speech = require('#google-cloud/speech');
const path = require('path');
const os = require('os');
const fs = require('fs');
const ffmpeg = require('fluent-ffmpeg');
const ffmpeg_static = require('ffmpeg-static');
admin.initializeApp(functions.config().firebase);
var db = admin.firestore();
// Wraps a fluent-ffmpeg command in a Promise: resolves (with no value) on
// the 'end' event, rejects with the emitted error on 'error'. Calling run()
// is what actually starts the command.
function promisifyCommand(command) {
  return new Promise((resolve, reject) => {
    command.on('end', () => resolve());
    command.on('error', (err) => reject(err));
    command.run();
  });
}
/**
* When an audio is uploaded in the Storage bucket We generate a mono channel audio automatically using
* node-fluent-ffmpeg.
*/
// Storage onFinalize trigger: converts an uploaded audio file to a 2-channel
// 32 kHz mp3 via ffmpeg, uploads the result next to the original with an
// '_output.mp3' suffix, then transcribes it and cleans up local temp files.
exports.generateMonoAudio = functions.storage.object().onFinalize((object) => {
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
// Exit if this is triggered on a file that is not an audio.
if (!contentType.startsWith('audio/')) {
console.log('This is not an audio.');
return null;
}
// Get the file name.
const fileName = path.basename(filePath);
// Exit if the audio is already converted (prevents retrigger loops).
if (fileName.endsWith('_output.mp3')) {
console.log('Already a converted audio.');
return null;
}
// Download file from bucket.
const bucket = gcs.bucket(fileBucket);
const tempFilePath = path.join(os.tmpdir(), fileName);
// We add a '_output.mp3' suffix to target audio file name. That's where we'll upload the converted audio.
const targetTempFileName = fileName.replace(/\.[^/.]+$/, '') + '_output.mp3';
const targetTempFilePath = path.join(os.tmpdir(), targetTempFileName);
const targetStorageFilePath = path.join(path.dirname(filePath), targetTempFileName);
return bucket.file(filePath).download({
destination: tempFilePath,
}).then(() => {
console.log('Audio downloaded locally to', tempFilePath);
// Convert the audio to mono channel using FFMPEG.
let command = ffmpeg(tempFilePath)
.setFfmpegPath(ffmpeg_static.path)
.audioChannels(2)
.audioFrequency(32000)
.format('mp3')
.output(targetTempFilePath);
command = promisifyCommand(command);
return command;
}).then(() => {
console.log('Output audio created at', targetTempFilePath);
// Uploading the audio.
return bucket.upload(targetTempFilePath, {destination: targetStorageFilePath});
}).then(() => {
console.log('Output audio uploaded to', targetStorageFilePath);
// Once the audio has been uploaded delete the local file to free up disk space.
fs.unlinkSync(tempFilePath);
fs.unlinkSync(targetTempFilePath);
// NOTE(review): source of the "Each then() should return a value or throw"
// lint error — the promise from getTextFromAudio is neither returned nor
// awaited. Two further problems: getTextFromAudio reads its argument with
// fs.readFileSync, so it needs the LOCAL path (targetTempFilePath), not the
// storage path; and it must run BEFORE the unlinkSync calls above delete
// that file. Suggested shape:
//   return getTextFromAudio(targetTempFilePath)
//     .then(() => { fs.unlinkSync(tempFilePath); fs.unlinkSync(targetTempFilePath); });
getTextFromAudio(targetStorageFilePath) //#### HERE! ERROR
return console.log('Temporary files removed.', targetTempFilePath);
});
});
// Transcribes a local audio file with Cloud Speech-to-Text: reads the file,
// base64-encodes it inline, and logs the joined transcript. Errors are
// caught and logged rather than propagated, so the returned promise always
// resolves (with undefined).
function getTextFromAudio(paramTargetStorageFilePath) {
  // Creates a client
  const client = new speech.SpeechClient();
  // Inline the audio content as base64 (only suitable for short clips).
  const audioBytes = fs.readFileSync(paramTargetStorageFilePath).toString('base64');
  // Recognition request: LINEAR16-encoded 16 kHz US-English audio.
  const request = {
    audio: { content: audioBytes },
    config: {
      encoding: 'LINEAR16',
      sampleRateHertz: 16000,
      languageCode: 'en-US',
    },
  };
  // Detects speech in the audio file
  return client
    .recognize(request)
    .then((data) => {
      const [response] = data;
      const transcription = response.results
        .map((result) => result.alternatives[0].transcript)
        .join('\n');
      console.log(`Transcription: ${transcription}`);
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
}
// Persists a transcript to Firestore at users/alovelace (field `first`).
// Returns the write promise so callers can await completion — the original
// discarded it (and left the unused docRef/setAda locals), which in a Cloud
// Function can let the process terminate before the write lands.
function postTranscript(transcriptText) {
  return db.collection('users').doc('alovelace').set({
    first: transcriptText
  });
}