I've created a Google Cloud Function which, when triggered, creates an xlsx file with exceljs and attaches it to an email sent with firebase-send-mail.
This is my code:
(the data is dummy, for testing)
exports.onEventReservCreate = functions
  .region("europe-west2")
  .firestore.document(
    "foodCourts/{foodCourtId}/events/{eventId}/eventReservations/{evtResId}"
  )
  .onCreate(async (snap, context) => {
    try {
      const excel = require("exceljs")
      // Create a new workbook
      var workbook = new excel.Workbook()
      // Create a sheet in that workbook
      var sheet = workbook.addWorksheet("Sheet1")
      // set the path for the file ("path" and "os" are required at the top of the file)
      const tempFilePath = path.join(os.tmpdir(), "excel.xlsx")
      sheet.columns = [
        { key: "name", header: "name" },
        { key: "age", header: "age" },
      ]
      var data = [
        { name: "Eddy", age: 24 },
        { name: "Paul", age: 24 },
      ]
      // add each row to the sheet
      data.forEach(el => sheet.addRow(el))
      // get the user email from the Firestore db ("db" and "uid" are defined elsewhere)
      const userRef = db.collection(`users`).doc(uid)
      const user = (await userRef.get()).data()
      // return the promise so the function doesn't terminate before the write finishes
      return workbook.xlsx
        .writeFile(tempFilePath)
        .then(res => {
          // sending email to user
          const emailData = {
            to: [user.email],
            template: {
              name: "reportEvents",
              data: {
                attachmentPath: tempFilePath,
                attachmentName: "nome allegato",
                date: dateFnsTz.format(
                  new Date(),
                  "dd/MM/yyyy - HH:mm"
                ),
              },
            },
          }
          return db.collection("email").doc().set(emailData)
        })
        .catch(err => console.log("ERROR --> ", err))
    } catch (err) {
      console.log(
        `Error while sending - Error: ${err}`
      )
    }
  })
In the functions log I have this error:
Error when delivering message=email/[id]: Error: ENOENT: no such file or directory, open '/tmp/excel.xlsx'
Why doesn't the /tmp folder exist?
Thanks
The temp folder has limited and restrictive access, and it is local to the function instance that wrote it; I am not sure how your email script is actually reading the file, as it may be a root-folder issue or an instance issue (the mail is sent by a separate function, which can't see your function's /tmp). Instead, I would upload the file to Storage with a unique download link and send that within your email.
SOLVED!
This is my code.
It requires some packages such as fs, os, and path.
This code will create an xlsx file, store it in your bucket, retrieve its URL, and add to the "email" collection a new document with a "url" property: the URL of the xlsx file in the bucket, so the user who receives the mail can download it.
const admin = require("firebase-admin")
const fs = require("fs")
const path = require("path")
const os = require("os")
const excel = require("exceljs")

const db = admin.firestore()
const batch = db.batch()
const bucket = admin.storage().bucket("your_bucket's_name")

// Create a new workbook
var workbook = new excel.Workbook()
// Create a sheet in that workbook
var sheet = workbook.addWorksheet("Sheet1")
// Headers must be in the format below
sheet.columns = [
  { key: "name", header: "name", width: 30 },
  // ...other columns
]
// Data must look like below; the keys must match the headers.
sheet.addRow({ name: "John" })

// (this part runs inside your async function)
const tempFilePath = path.join(os.tmpdir(), "excel.xlsx")
await workbook.xlsx.writeFile(tempFilePath)

const destinationName = `[your_filename].xlsx`
const result = await bucket.upload(tempFilePath, {
  destination: destinationName,
})
// getSignedUrl resolves with an array: [url]
result[0]
  .getSignedUrl({
    action: "read",
    expires: "03-17-2025", // choose a date
  })
  .then(([url]) => {
    const emailData = {
      to: ["user_email"],
      template: {
        name: "your_template's_name",
        data: {
          url,
        },
      },
    }
    const emailRef = db.collection("email").doc()
    batch.set(emailRef, emailData)
    return batch.commit()
  })
  .catch(err => console.log(err))
I'm going to delete old images using a scheduled function. I have created thumbnail images for these images, and I want to delete only the originals, keeping the thumbnails.
The following is part of my code.
Schedule function:
exports.scheduledDeleteFile = functions
  .region("asia-northeast3")
  .pubsub.schedule("every 5 minutes")
  .onRun(async (context) => {
    try {
      const bucket = firebase.storage().bucket();
      // get storage files
      const [filesArray] = await bucket.getFiles({
        prefix: "chat/files",
      });
      let totalCount = filesArray.length;
      let disposedCount = 0;
      // variables with our settings to be reused below
      const now = Date.now();
      const time_ago = Date.now() - 180000; // 3 min, for testing
      const TIMESTAMP_AGO = new Date(time_ago); // change datetime
      const DELETE_OPTIONS = { ignoreNotFound: true }; // ignore files that are already gone
      functions.logger.log(
        `Found ${totalCount} files that need to be checked.`
      );
      const deleteOldFileResults = await Promise.all(
        filesArray.map(async (file) => {
          let metadata;
          try {
            // fetch the metadata for the file
            [metadata] = await file.getMetadata();
            // metadata of file
            const { temporaryHold, eventBasedHold, timeCreated } = metadata;
            const activeHold = temporaryHold || eventBasedHold;
            const TIME_CREATED = new Date(timeCreated);
            const dispose = TIME_CREATED < TIMESTAMP_AGO && !activeHold;
            // delete
            if (dispose) {
              await file.delete(DELETE_OPTIONS);
              functions.logger.log("delete");
              disposedCount++;
              // ===================
              // updating the firestore file chat
              // is handled by a separate trigger function
            }
            return { file, metadata, disposed: dispose, skipped: activeHold };
          } catch (error) {
            functions.logger.error(error);
          }
        })
      );
    } catch (error) {
      functions.logger.error(error);
    }
  });
My thumbnail images are in the same path as the original files. Is there an option to exclude certain files when listing them? (For example, files whose names start with "thumb_".)
await bucket.getFiles({
  prefix: "chat/files",
});
The following is the thumbnail-creation function. I referred to the example provided by Firebase:
https://github.com/firebase/functions-samples/tree/main/2nd-gen/thumbnails
// thumb image size
const THUMB_MAX_HEIGHT = 200;
const THUMB_MAX_WIDTH = 200;
// thumb image name prefix
const THUMB_PREFIX = "thumb_";

exports.generateThumbnail = functions
  .region("asia-northeast3")
  .storage.object()
  .onFinalize(async (object) => {
    // custom metadata
    const userKey = object.metadata["userKey"];
    const chatroomKey = object.metadata["chatroomKey"];
    const type = object.metadata["type"];
    // File and directory paths.
    const filePath = object.name;
    const contentType = object.contentType; // This is the image MIME type
    const fileDir = path.dirname(filePath);
    const fileName = path.basename(filePath);
    const thumbFilePath = path.normalize(
      // ! if the path is changed, an error occurs!
      path.join(fileDir, `${THUMB_PREFIX}${fileName}`)
    );
    const tempLocalFile = path.join(os.tmpdir(), filePath);
    const tempLocalDir = path.dirname(tempLocalFile);
    const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath);
    if (!contentType.startsWith("image/")) {
      return functions.logger.log("This is not an image.");
    }
    if (fileName.startsWith(THUMB_PREFIX)) {
      return functions.logger.log("Already a Thumbnail.");
    }
    // Cloud Storage files.
    const bucket = admin.storage().bucket(object.bucket);
    const file = bucket.file(filePath);
    const thumbFile = bucket.file(thumbFilePath);
    const metadata = {
      contentType: contentType,
    };
    await mkdirp(tempLocalDir);
    // Download file from bucket.
    await file.download({ destination: tempLocalFile });
    functions.logger.log("The file has been downloaded to", tempLocalFile);
    // Generate a thumbnail using ImageMagick.
    await spawn(
      "convert",
      [
        tempLocalFile,
        "-thumbnail",
        `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`,
        tempLocalThumbFile,
      ],
      { capture: ["stdout", "stderr"] }
    );
    functions.logger.log("Thumbnail created at", tempLocalThumbFile);
    // Upload the thumbnail.
    await bucket.upload(tempLocalThumbFile, {
      destination: thumbFilePath,
      metadata: metadata,
    });
    functions.logger.log("Thumbnail uploaded to Storage at", thumbFilePath);
    fs.unlinkSync(tempLocalFile);
    fs.unlinkSync(tempLocalThumbFile);
    const results = await Promise.all([
      thumbFile.getSignedUrl({
        action: "read",
        expires: "03-01-2500",
      }),
      file.getSignedUrl({
        action: "read",
        expires: "03-01-2500",
      }),
    ]);
    functions.logger.log("Got Signed URLs.");
    const thumbResult = results[0];
    const originalResult = results[1];
    const thumbFileUrl = thumbResult[0];
    const fileUrl = originalResult[0];
    await file.delete().then((value) => {
      functions.logger.log("Original deleted");
    });
    // Add the URLs to the Database
    await admin
      .database()
      .ref("images")
      .push({ path: fileUrl, thumbnail: thumbFileUrl });
    return functions.logger.log("Thumbnail URLs saved to database.");
  });
Is there an option to exclude certain files when listing them? (For example, files whose names start with "thumb_".)
No. You can filter out the objects you don't want in the code that iterates the results.
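For example, a minimal sketch (assuming the "thumb_" prefix from the thumbnail function above, and the same bucket object as the question's code):
const path = require("path");

// List everything under the prefix once, then drop thumbnails in memory.
const [filesArray] = await bucket.getFiles({
  prefix: "chat/files",
});
const originalsOnly = filesArray.filter(
  (file) => !path.basename(file.name).startsWith("thumb_")
);
// Pass originalsOnly to the existing delete loop instead of filesArray.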
I have come across the below code on the internet.
const { Octokit } = require("@octokit/rest");
const { Base64 } = require("js-base64");
const fs = require("fs");
require("dotenv").config();
const octokit = new Octokit({
  auth: process.env.GITHUB_ACCESS_TOKEN,
});

const main = async () => {
  try {
    const content = fs.readFileSync("./input.txt", "utf-8");
    const contentEncoded = Base64.encode(content);
    const { data } = await octokit.repos.createOrUpdateFileContents({
      // replace the owner and email with your own details
      owner: "your-github-account",
      repo: "octokit-create-file-example",
      path: "OUTPUT.md",
      message: "feat: Added OUTPUT.md programmatically",
      content: contentEncoded,
      committer: {
        name: `Octokit Bot`,
        email: "your-email",
      },
      author: {
        name: "Octokit Bot",
        email: "your-email",
      },
    });
    console.log(data);
  } catch (err) {
    console.error(err);
  }
};

main();
I was successfully able to create a file in GitHub. Is there a way to update an existing file (like adding some data to the file) with Octokit.js?
I believe you do the same thing, but you have to provide a sha property in your call to createOrUpdateFileContents, which contains the sha of the blob you are replacing:
https://docs.github.com/en/rest/reference/repos#create-or-update-file-contents
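A minimal sketch under that assumption; updateExistingFile is a hypothetical helper, and owner/repo/path reuse the placeholders from the question:
// Hypothetical helper; assumes the octokit and Base64 instances from the question.
const updateExistingFile = async () => {
  const owner = "your-github-account";
  const repo = "octokit-create-file-example";
  const filePath = "OUTPUT.md";

  // Read the current file so we know the sha of the blob being replaced.
  // (For a single file, getContent returns an object with content and sha.)
  const { data: existing } = await octokit.repos.getContent({
    owner,
    repo,
    path: filePath,
  });

  // Decode, append some data, re-encode
  const updated =
    Buffer.from(existing.content, "base64").toString("utf-8") +
    "\nSome appended data";

  await octokit.repos.createOrUpdateFileContents({
    owner,
    repo,
    path: filePath,
    message: "feat: Updated OUTPUT.md programmatically",
    content: Base64.encode(updated),
    sha: existing.sha, // required for updates; omit when creating a new file
  });
};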
As seen in the title, I am currently using Sails.js + skipper-better-s3 for S3 uploads. I started with uploading one file, which works great; then a change request required multi-file upload at once, so I added a for loop. By doing this, though, all the keys end up the same, and only one file is actually uploaded: the last uploaded file, but with the first upload's filename.
I did read some articles, and people are saying things like: the problem is that the for loop is synchronous while the file upload is asynchronous, and the fix is to use recursion. I tried that too, but no luck; the same thing happens.
My recursive code is below.
s3_upload_multi: async (req, res) => {
  const generatePath = (rootPath, fieldName) => {
    let path;
    // this is just a switch statement that checks which fieldName is provided; the value of path depends on it
    // the other two variables just check whether the upload content type is correct
    return { path };
  };

  const processUpload = async ({
    fieldName,
    awsOp,
    fileExtension,
    rootPath,
    fileName,
  }) => {
    return new Promise(function (resolve, reject) {
      req.file(fieldName).upload(awsOp, async (err, filesUploaded) => {
        if (err) return reject(err);
        const filesUploadedF = filesUploaded[0]; // F = first file
        const response = {
          status: true,
          errCode: 200,
          msg: 'OK',
          response: {
            url: filesUploadedF.extra.Location,
            size: filesUploadedF.size,
            type: fileExtension,
            filename: filesUploadedF.filename,
            key: filesUploadedF.extra.Key,
            field: fieldName,
          }
        };
        resolve(response);
      });
    });
  }

  const process_recur = async (files, fieldName) => {
    if (files.length <= 0) return;
    const fileUpload = files[0].stream;
    const rootPath = `${sails.config.aws.upload.path.root}`;
    const fileCType = fileUpload.headers['content-type'];
    // console.log(fileCType, 'fileCType');
    const { path } = generatePath(rootPath, fieldName);
    const fileName = fileUpload.filename;
    const fileExtension = fileUpload.filename.split('.').pop();
    const genRan = await UtilsService.genRan(8);
    const fullPath = `${path}${genRan}-${fileName}`;
    const awsOp = {
      adapter: require('skipper-better-s3'),
      key: sails.config.aws.access_key,
      secret: sails.config.aws.secret_key,
      saveAs: fullPath,
      bucket: sails.config.aws.bucket,
      s3params: {
        ACL: 'public-read'
      },
    };
    const config = {
      fieldName,
      awsOp,
      fileExtension,
      rootPath,
      fileName,
    }
    const proceed = await processUpload(config);
    files.shift();
    await process_recur(files, fieldName);
  };

  try {
    const fieldName = req._fileparser.upstreams[0].fieldName;
    const files = req.file(fieldName)._files;
    await process_recur(files, fieldName);
  } catch (e) {
    console.log(e, 'inside UploadService');
    return false;
  }
}
Below is my code using a for loop, which is quite similar to the above.
s3_upload_multi: async (req, res) => {
  const generatePath = (rootPath, fieldName) => {
    let path;
    // this is just a switch statement that checks which fieldName is provided; the value of path depends on it
    // the other two variables just check whether the upload content type is correct
    return { path };
  };

  const processUpload = async ({
    fieldName,
    awsOp,
    fileExtension,
    rootPath,
    fileName,
  }) => {
    return new Promise(function (resolve, reject) {
      req.file(fieldName).upload(awsOp, async (err, filesUploaded) => {
        if (err) return reject(err);
        const filesUploadedF = filesUploaded[0]; // F = first file
        const response = {
          status: true,
          errCode: 200,
          msg: 'OK',
          response: {
            url: filesUploadedF.extra.Location,
            size: filesUploadedF.size,
            type: fileExtension,
            filename: filesUploadedF.filename,
            key: filesUploadedF.extra.Key,
            field: fieldName,
          }
        };
        resolve(response);
      });
    });
  }

  try {
    const fieldName = req._fileparser.upstreams[0].fieldName;
    const files = req.file(fieldName)._files;
    for (const file of files) {
      const fileUpload = file.stream;
      const rootPath = `${sails.config.aws.upload.path.root}`;
      const fileCType = fileUpload.headers['content-type'];
      // console.log(fileCType, 'fileCType');
      const fileName = fileUpload.filename;
      const { path } = generatePath(rootPath, fieldName);
      const fileExtension = fileUpload.filename.split('.').pop();
      // using a variable here because if this is an image, a thumbnail will be created with the same name as the original one
      const genRan = await UtilsService.genRan(8);
      const fullPath = `${path}${genRan}-${fileName}`;
      const awsOp = {
        adapter: require('skipper-better-s3'),
        key: sails.config.aws.access_key,
        secret: sails.config.aws.secret_key,
        saveAs: fullPath,
        bucket: sails.config.aws.bucket,
        s3params: {
          ACL: 'public-read'
        },
      };
      const config = {
        fieldName,
        awsOp,
        fileExtension,
        rootPath,
        fileName,
      }
      const proceed = await processUpload(config);
      console.log(proceed, 'proceed');
    }
  } catch (e) {
    console.log(e, 'inside UploadService');
    return false;
  }
}
Where is my mistake that's causing this behavior? I checked my path: it's totally correct, with the correct filename too, when I console.log it.
Thanks in advance for any suggestions and help.
It took me quite a lot of time to figure this out ages ago.
The catch is that you are using skipper-better-s3, whose documentation is not as detailed as skipper's. Going back to the skipper documentation: the saveAs field doesn't only take a string but also a function, which you can use to get each file's filename and return the key as needed. So you actually don't need recursion or a for loop at all.
For example, with some of your code:
const awsOp = {
  adapter: require('skipper-better-s3'),
  key: sails.config.aws.access_key,
  secret: sails.config.aws.secret_key,
  saveAs: (__newFileStream, next) => {
    // generatePath is what you wrote
    // __newFileStream.filename is the filename of each file before uploading
    // the path is pretty much the S3 key, which includes your filename too
    const { path } = generatePath(rootPath, __newFileStream.filename, fieldName);
    return next(undefined, path);
  },
  bucket: sails.config.aws.bucket,
  s3params: {
    ACL: 'public-read'
  },
};
Skipper documentation: https://www.npmjs.com/package/skipper#customizing-at-rest-filenames-for-uploads
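With saveAs resolving the key per file, a single upload call handles every file in the upstream, so the loop disappears. A rough sketch under that assumption (the response handling is illustrative, not your exact shape):
// skipper invokes saveAs once per incoming file,
// so each file gets its own S3 key from generatePath.
req.file(fieldName).upload(awsOp, (err, filesUploaded) => {
  if (err) return res.serverError(err);
  // filesUploaded is an array with one entry per uploaded file
  return res.ok(filesUploaded.map((f) => f.extra.Location));
});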
I use the json2csv module to convert my data to CSV and send it by mail, but all the data appears in the same column. What should I do to make it appear in separate columns?
This is my json2csv.js:
const { Parser } = require('json2csv')
module.exports.toCsv = function (list, header) {
  var arr = [];
  for (let i = 0; i < header.length; i++) {
    arr.push(header[i])
  }
  // json2csv expects the option to be named "fields" (plural)
  const fields = arr;
  const opts = { fields };
  try {
    const parser = new Parser(opts);
    const csv = parser.parse(list);
    console.log(csv);
    return csv
  } catch (err) {
    console.error(err);
  }
}
This is my sendMail.js
var mailOptions = {
  from: 'abc@gmail.com',
  to: 'abc@gmail.com',
  subject: param,
  // "Liste Ektedir" is Turkish for "the list is attached"
  text: 'Liste Ektedir',
  attachments: [{
    filename: param + '.csv',
    content: toCsv(groupsList, header)
  }]
};
Screenshot: the CSV file I opened after sending the mail.
Screenshot: this is what I want.
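One likely cause, assuming the file is opened in Excel: locales such as Turkish use a semicolon as the list separator, so a comma-delimited CSV lands in a single column. json2csv accepts a custom delimiter, and note that its option is named fields, not field:
const { Parser } = require('json2csv');

// "fields" (plural) is the documented option name;
// delimiter ";" matches Excel's list separator in semicolon locales.
const parser = new Parser({ fields: header, delimiter: ';' });
const csv = parser.parse(list);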
I generated a PDF through Node.js. I want to add a watermark to this generated PDF. I used the dynamic-html-pdf plugin in my code. Is there any option for adding a watermark in dynamic-html-pdf? Here is my sample code:
var path = require('path');
var pdf = require('dynamic-html-pdf');

var html = '<!DOCTYPE html><html><head><style>';
html = html + '</style>';
html = html + '</head>';
html = html + '<body>';
html = html + '<div class="divstyle1" id="content">A computer is a device that can be instructed to carry out sequences of arithmetic or logical operations automatically via computer programming.</div>';
html = html + '</body></html>';

var options = {
  format: "A4",
  orientation: "portrait",
  border: "10mm",
  base: 'file://' + path.resolve('./public/graph') + '/'
};

var document = {
  type: 'file',
  template: html,
  context: {
    img: './public/graph/logo.jpg'
  },
  path: "./public/graph/mypdf.pdf"
};

pdf.create(document, options)
  .then(result => {
    // the callback parameter is renamed so it no longer shadows the
    // Express "res" assumed to be in scope
    res.status(200).json({
      message: 'pdf created'
    });
  })
  .catch(error => {
    res.status(200).json({
      message: 'error'
    });
  });
After saving your PDF document, use the image-watermark module to append a watermark to your generated PDF:
var watermark = require('image-watermark');
watermark.embedWatermark('/path/to/your/generated/pdf', {'text' : 'sample watermark'});
After the PDF is created, use the pdf-watermark package to watermark the PDF file:
const PDFWatermark = require('pdf-watermark');

await PDFWatermark({
  pdf_path: "./newsletter.pdf",
  text: "Gentech", // optional
  image_path: "./everest.png",
});
Another solution for adding a text watermark to a PDF document is the Aspose.PDF Cloud SDK for Node.js. It is a commercial product but provides 150 free monthly API calls.
Currently, it supports PDF file processing from cloud storage: Aspose internal storage, Amazon S3, Dropbox, Google Drive, Google Cloud Storage, Windows Azure Storage, and FTP storage. However, we plan to add support for processing PDF documents from the request body (stream).
P.S.: I'm a developer evangelist at Aspose.
const { PdfApi } = require("asposepdfcloud");
const { TextStamp } = require("asposepdfcloud/src/models/textStamp");
const { TextState } = require("asposepdfcloud/src/models/textState");
const { HorizontalAlignment } = require("asposepdfcloud/src/models/horizontalAlignment");
const { VerticalAlignment } = require("asposepdfcloud/src/models/verticalAlignment");
const { Rotation } = require("asposepdfcloud/src/models/rotation");

// Get Client Id and Client Secret from https://dashboard.aspose.cloud/
pdfApi = new PdfApi("xxxxx-xxxx-xxxx-xxxx-xxxxxxxxxx", "xxxxxxxxxxxxxxxxxxxxxxx");

var fs = require('fs');
const name = "Test.pdf";
const pageNumber = 1;
const remoteTempFolder = "Temp";
const localTestDataFolder = "C:\\Temp";
const path = remoteTempFolder + "\\" + name;
var data = fs.readFileSync(localTestDataFolder + "\\" + name);

// Upload File
pdfApi.uploadFile(path, data).then((result) => {
  console.log("Uploaded File");
}).catch(function (err) {
  // Deal with an error
  console.log(err);
});

// Add Text Stamp
const textState = new TextState();
textState.fontSize = 14;
textState.font = 'Arial';

const stamp = new TextStamp();
stamp.background = true;
stamp.leftMargin = 1;
stamp.rightMargin = 2;
stamp.topMargin = 3;
stamp.bottomMargin = 4;
stamp.horizontalAlignment = HorizontalAlignment.Center;
stamp.verticalAlignment = VerticalAlignment.Center;
stamp.opacity = 1;
stamp.rotate = Rotation.None;
stamp.rotateAngle = 0;
stamp.xIndent = 0;
stamp.yIndent = 0;
stamp.zoom = 1;
stamp.textAlignment = HorizontalAlignment.Center;
stamp.value = "Aspose.PDF Cloud";
stamp.textState = textState;

pdfApi.postPageTextStamps(name, pageNumber, [stamp], null, remoteTempFolder).then((result) => {
  console.log(result.body.code);
}).catch(function (err) {
  // Deal with an error
  console.log(err);
});

// Download file
const localPath = "C:/Temp/textstamp.pdf";
pdfApi.downloadFile(path).then((result) => {
  console.log(result.response.statusCode);
  console.log(result.body.byteLength);
  fs.writeFileSync(localPath, result.body);
  console.log("File Downloaded");
}).catch(function (err) {
  // Deal with an error
  console.log(err);
});