I have done an upload using Multer in Node.js with memoryStorage: the file is kept in a buffer first, and from that buffer I can upload to Google Cloud Storage.
But with memoryStorage I cannot rename the image file.
So I am using multer.diskStorage, but when I POST, the file uploads successfully yet it is not the picture; the file size becomes small, around 10 B.
This is my helper code with the uploadImage function:
const util = require('util')
const gc = require('../config/')
const bucket = gc.bucket('jsimage') // bucket name
const { format } = util

const uploadImage = (file) => new Promise((resolve, reject) => {
  console.log(file)
  //const { originalname, buffer } = file
  const { filename, destination } = file
  //const blob = bucket.file(originalname.replace(/ /g, "_"))
  const blob = bucket.file(filename)
  const blobStream = blob.createWriteStream({
    resumable: false
  })
  blobStream.on('finish', () => {
    const publicUrl = format(
      `https://storage.googleapis.com/${bucket.name}/${blob.name}`
    )
    resolve(publicUrl)
  })
  .on('error', () => {
    reject(`Unable to upload image, something went wrong`)
  })
  //.end(buffer)
  .end(destination) // note: this writes the destination *path string*, not the file bytes
})

module.exports = uploadImage
With the code above the upload to Google Cloud Storage succeeds, but the object is not the picture; its size is always 10 B.
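For reference, the tiny object size is consistent with the stream being fed the destination path string instead of the file bytes. A minimal sketch of a buffer-based upload that still lets you pick the object name (assuming multer's memoryStorage, which exposes file.buffer and file.originalname; the uploadBuffer name and renaming scheme are made up for illustration):

const uploadBuffer = (file, newName) => new Promise((resolve, reject) => {
  // The object name is free to choose; it need not match the original name.
  const blob = bucket.file(newName || file.originalname.replace(/ /g, '_'))
  blob.createWriteStream({ resumable: false })
    .on('finish', () => resolve(`https://storage.googleapis.com/${bucket.name}/${blob.name}`))
    .on('error', reject)
    .end(file.buffer) // write the actual file bytes kept in memory by memoryStorage
})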
In this example, after the picture has been uploaded to a temp or other local folder, we can upload it to Google Cloud:
const util = require('util')
const gc = require('../config/')
const bucket = gc.bucket('jsimage') // bucket name in Google Cloud Storage
const path = require('path')
const { format } = util

// Resolve only once the upload has actually finished, so callers can await it.
const uploadImage = (file) => new Promise((resolve, reject) => {
  const { filename } = file
  const picture = path.join(__dirname, '../uploads/', filename)
  // This is the upload command; wait for it before building the return value.
  bucket.upload(picture)
    .then(() => {
      // This is sent back to the caller.
      const publicUrl = format(
        `https://storage.googleapis.com/${bucket.name}/${filename}`
      )
      resolve(publicUrl)
    })
    .catch(reject)
})
module.exports = uploadImage
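A hedged usage sketch for the helper above (the route path, field name, and uploads directory are assumptions, not part of the answer):

const express = require('express')
const multer = require('multer')
const uploadImage = require('../helpers/uploadImage') // hypothetical path to the helper

const app = express()
const upload = multer({ dest: 'uploads/' }) // diskStorage: multer writes to ./uploads first

app.post('/upload', upload.single('image'), async (req, res) => {
  try {
    const url = await uploadImage(req.file) // req.file carries filename and destination
    res.json({ url })
  } catch (err) {
    res.status(500).json({ error: String(err) })
  }
})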
I have to send static images to another Node app. To do this I need to get the Base64 of a file.
This is the function I used in another project (a Vue.js web app):
export async function getBase64FromURL (path, filename) {
  const fr = new FileReader()
  fr.readAsDataURL(new File(path, filename))
  return await new Promise((resolve, reject) => {
    fr.onloadend = () => {
      resolve(fr.result)
    }
  })
}
Node.js lacks some browser APIs, for example FileReader(), and I found an npm package for that.
But I haven't found a solution for new File(); what can I do?
// Import "fs" lib.
const fs = require('fs');
// Read file as buffer and convert to BASE64.
const fileBase64 = fs.readFileSync('./file.txt').toString('base64');
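If the goal is the same data-URL string that readAsDataURL produces in the browser, a minimal sketch (the file name and the image/png MIME type are assumptions; use your file's actual type):

const fs = require('fs');

// Build the data URL by hand: the MIME-type header plus the base64 payload.
const base64 = fs.readFileSync('./image.png').toString('base64');
const dataUrl = `data:image/png;base64,${base64}`;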
I am using FileReader, but I am only getting the file name and file format. I couldn't figure out how to get the folder name.
There are several modules available, but I don't know how to make them work in React.
Does anybody know?
Here are the examples I found:
const [fileName, setfileName] = useState("")

const upLoadImage = async (e) => {
  const file = e.target.files[0]
  const base64 = await convertBase64(file)
  console.log(base64)
  setfileName(file.name)
}

const convertBase64 = (file) => {
  return new Promise((resolve, reject) => {
    const fileReader = new FileReader()
    fileReader.readAsDataURL(file)
    fileReader.onload = () => {
      resolve(fileReader.result)
    }
    fileReader.onerror = (error) => {
      reject(error)
    }
  })
}
For security reasons, browsers don't expose a file's full path: JavaScript in the browser has no access to the file system.
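One partial exception, as a sketch: if the user selects a whole directory through an input with the webkitdirectory attribute, each File exposes webkitRelativePath, which contains the folder structure relative to the chosen directory (still never the absolute path):

// HTML: <input type="file" id="picker" webkitdirectory />
document.getElementById('picker').addEventListener('change', (e) => {
  for (const file of e.target.files) {
    // e.g. "myFolder/images/photo.png" - relative to the picked folder, never absolute
    console.log(file.name, file.webkitRelativePath)
  }
})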
I am trying to read a CSV file inside a Firebase function so that I can send mail to all the records. I am planning to go with the following procedure:
upload the csv
fire an onFinalize function
read the file and send emails
Below is the function:
import * as functions from "firebase-functions";
import * as mkdirp from "mkdirp-promise";
import * as os from "os";
import * as path from "path";
import csv = require('csvtojson');
const gcs = require('@google-cloud/storage')({ keyFilename: 'service-account-credentials.json' });

const csvDirectory = "csv";

export = functions.storage.object().onFinalize(async (object) => {
  const filePath = object.name;
  const contentType = object.contentType;
  const fileDir = path.dirname(filePath);
  if (fileDir.startsWith(csvDirectory) && contentType.startsWith("text/csv")) {
    const bucket = gcs.bucket(object.bucket);
    const file = bucket.file(filePath);
    const fileName = path.basename(filePath);
    const tempLocalFile = path.join(os.tmpdir(), filePath);
    const tempLocalDir = path.dirname(tempLocalFile);
    console.log("values", bucket, file, fileName, tempLocalDir, tempLocalFile);
    console.log("csv file uploaded");
    await mkdirp(tempLocalDir);
    await bucket.file(filePath).download({
      destination: tempLocalFile
    });
    console.log('The file has been downloaded to', tempLocalFile);
    csv()
      .fromFile(tempLocalFile)
      .then((jsonObj) => {
        console.log(jsonObj);
      })
  }
});
While running the code I only see the "csv file uploaded" message that I wrote inside the console.log, and then I get a timeout after 1 minute. I am also not getting the 'The file has been downloaded to' log. Can anybody look at the code and help me out?
You are mixing the use of async/await with a call to the then() method. You should also use await for the fromFile() method.
The following should do the trick (untested):
export = functions.storage.object().onFinalize(async (object) => {
  const filePath = object.name;
  const contentType = object.contentType;
  const fileDir = path.dirname(filePath);
  try {
    if (fileDir.startsWith(csvDirectory) && contentType.startsWith("text/csv")) {
      //.....
      await mkdirp(tempLocalDir);
      await bucket.file(filePath).download({
        destination: tempLocalFile
      });
      console.log('The file has been downloaded to', tempLocalFile);
      const jsonObj = await csv().fromFile(tempLocalFile);
      console.log(jsonObj);
      return null;
    } else {
      //E.g. throw an error
    }
  } catch (error) {
    //.....
  }
});
Also note that (independently of the mixed use of async/await and then()), with the following line in your code
csv().fromFile(tempLocalFile).then(...)
you were not returning the Promise returned by the fromFile() method. This is a key point in Cloud Functions.
I would suggest you watch the official Video Series on Cloud Functions (https://firebase.google.com/docs/functions/video-series/) and in particular the videos on Promises titled "Learn JavaScript Promises".
I need to create a zip file with PDFs that I receive from AWS storage, and I am trying to do this with adm-zip in Node.js, but I can't read the final file.zip.
Here is the code:
var zip = new AdmZip();

// add file directly
var content = data.Body.buffer;
zip.addFile("test.pdf", content, "entry comment goes here");
// console.log(content)

// add local file
zip.addLocalFile(`./tmp/boletos/doc.pdf`);

// get everything as a buffer
var willSendthis = zip.toBuffer();
console.log(willSendthis)

// or write everything to disk
zip.writeZip("test.zip", `../../../tmp/boletos/${datastring}.zip`);
As it is, this only creates a separate .zip for each file.
I was also facing this issue. I looked through a lot of SO posts. This is how I was able to create a zip with multiple files from download URLs. Please keep in mind that I'm unsure this is best practice, or whether it will blow up memory.
Create a zip from a list of IDs of resources requested by the client:
const https = require('https'); // needed for https.get below

const zip = new AdmZip();
await Promise.all(sheetIds.map(async (sheetId) => {
  const downloadUrl = await this.downloadSds({ sheetId, userId, memberId });
  if (downloadUrl) {
    await new Promise((resolve) => https.get(downloadUrl, (res) => {
      const data = [];
      res.on('data', (chunk) => {
        data.push(chunk);
      }).on('end', () => {
        const buffer = Buffer.concat(data);
        resolve(zip.addFile(`${sheetId}.pdf`, buffer));
      });
    }));
  } else {
    console.log('could not download');
  }
}));
const zipFile = zip.toBuffer();
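To get that buffer to the browser, a hedged server-side sketch (assuming an Express handler; the route path and the buildZip helper wrapping the code above are made up for illustration):

// Hypothetical Express route; buildZip runs the AdmZip code above
// and returns the zip.toBuffer() result.
app.post('/api/bulk-download', async (req, res) => {
  const zipFile = await buildZip(req.body.sheetIds);
  res.set({
    'Content-Type': 'application/zip',
    'Content-Disposition': 'attachment; filename="sheets.zip"',
  });
  res.send(zipFile);
});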
I then used downloadjs in my React.js client to download.
const result = await sds.bulkDownloadSds(payload);
if (result.status > 399) return store.rejectWithValue({ errorMessage: result?.message || 'Error', redirect: result.redirect });
const filename = 'test1.zip';
const document = await result.blob();
download(document, filename, 'zip');
When trying to access an image in the home directory of my Firebase Storage bucket from Node.js functions, I'm getting [object Object] as a response. I guess I initialized the bucket incorrectly, but I'm not sure where I'm going wrong.
This is the debug info in Firebase Functions:
ChildProcessError: `composite -compose Dst_Out [object Object] [object Object] /tmp/output_final2.png` failed with code 1
Here's my code:
const functions = require('firebase-functions'); // needed for functions.storage below
const admin = require('firebase-admin');
admin.initializeApp();
const storage = admin.storage();
const os = require('os');
const path = require('path');
const spawn = require('child-process-promise').spawn;

exports.onFileChange = functions.storage.object().onFinalize(async object => {
  const bucket = storage.bucket('myID.appspot.com/');
  const contentType = object.contentType;
  const filePath = object.name;
  console.log('File change detected, function execution started');
  if (object.resourceState === 'not_exists') {
    console.log('We deleted a file, exit...');
    return;
  }
  if (path.basename(filePath).startsWith('changed-')) {
    console.log('We already changed that file!');
    return;
  }
  const destBucket = bucket;
  const tmpFilePath = path.join(os.tmpdir(), path.basename(filePath));
  const border = bucket.file("border.png");
  const mask1 = bucket.file("mask1.png");
  const metadata = { contentType: contentType };
  return destBucket.file(filePath).download({
    destination: tmpFilePath
  }).then(() => {
    return spawn('composite', ['-compose', 'Dst_Out', mask1, border, tmpFilePath]);
  }).then(() => {
    return destBucket.upload(tmpFilePath, {
      destination: 'changed-' + path.basename(filePath),
      metadata: metadata
    });
  });
});
If, with
const bucket = storage.bucket('myID.appspot.com/');
your goal is to initialize the default bucket, you should just do
const bucket = storage.bucket();
since you have declared storage as admin.storage().
UPDATE (following your comment about const border = bucket.file("border.png");):
In addition, looking at the code of a similar Cloud Function (from the official samples, using ImageMagick and spawn), it appears that you should not pass to the spawn() method File objects created through the file() method of the Cloud Storage Node.js Client API (i.e. const border = bucket.file("border.png");), but rather files that you have previously saved to a temp directory.
Look at the following excerpt from the Cloud Function example referred to above. They define some temporary directory and file paths (using the path module), download the files to this directory and use them to call the spawn() method.
//....
const filePath = object.name;
const contentType = object.contentType; // This is the image MIME type
const fileDir = path.dirname(filePath);
const fileName = path.basename(filePath);
const thumbFilePath = path.normalize(path.join(fileDir, `${THUMB_PREFIX}${fileName}`)); // <---------
const tempLocalFile = path.join(os.tmpdir(), filePath); // <---------
const tempLocalDir = path.dirname(tempLocalFile); // <---------
const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath); // <---------
//....
// Cloud Storage files.
const bucket = admin.storage().bucket(object.bucket);
const file = bucket.file(filePath);
const thumbFile = bucket.file(thumbFilePath);
const metadata = {
contentType: contentType,
// To enable Client-side caching you can set the Cache-Control headers here. Uncomment below.
// 'Cache-Control': 'public,max-age=3600',
};
// Create the temp directory where the storage file will be downloaded.
await mkdirp(tempLocalDir) // <---------
// Download file from bucket.
await file.download({destination: tempLocalFile}); // <---------
console.log('The file has been downloaded to', tempLocalFile);
// Generate a thumbnail using ImageMagick.
await spawn('convert', [tempLocalFile, '-thumbnail', `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`, tempLocalThumbFile], {capture: ['stdout', 'stderr']});
//.....
You can't pass Cloud Storage File objects to spawn. You need to pass strings that will be used to build the command line. This means you need to download those files locally to /tmp before you can work with them; ImageMagick doesn't know how to work with files in Cloud Storage.
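As a hedged sketch of that fix against the question's code (untested; it reuses the bucket, tmpFilePath, os, path, and spawn already defined there):

// Download the overlays to /tmp so spawn receives plain string paths
// instead of Cloud Storage File objects.
const tmpMask = path.join(os.tmpdir(), 'mask1.png');
const tmpBorder = path.join(os.tmpdir(), 'border.png');

await bucket.file('mask1.png').download({ destination: tmpMask });
await bucket.file('border.png').download({ destination: tmpBorder });

// tmpFilePath is the image already downloaded in the question's code;
// now every argument is a string, which is what the composite CLI expects.
await spawn('composite', ['-compose', 'Dst_Out', tmpMask, tmpBorder, tmpFilePath]);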