Passing functions with require - javascript

I want to pass functions with require. Here's the code:
multerModule.js
const path = require('path');
const multer = require('multer');
const crypto = require('crypto');
const GridFsStorage = require('multer-gridfs-storage');
// Create storage engine
// Builds a GridFS storage engine bound to the given Mongoose connection and
// bucket. Every stored file receives a random 16-byte hex name carrying the
// upload's original extension.
const initStorage = (conn, bucketName) =>
  new GridFsStorage({
    db: conn,
    file(req, file) {
      return new Promise((resolve, reject) => {
        crypto.randomBytes(16, (err, buf) => {
          if (err) {
            reject(err);
            return;
          }
          resolve({
            filename: buf.toString('hex') + path.extname(file.originalname),
            bucketName,
          });
        });
      });
    },
  });
// Allowed image extensions (lower-case; comparison below is case-insensitive).
const IMAGE_EXTENSIONS = ['.png', '.jpg', '.jpeg', '.gif'];

// Builds a multer instance over the given storage engine that accepts only
// image uploads; non-image files are rejected with an Error.
// Fix: the original comparison was case-sensitive, so ".PNG"/".Jpg" uploads
// were rejected even though they are valid images.
const initUpload = (storage) => multer({
  storage: storage,
  fileFilter: function (req, file, callback) {
    const ext = path.extname(file.originalname).toLowerCase();
    if (!IMAGE_EXTENSIONS.includes(ext)) {
      return callback(new Error('Only images are allowed'));
    }
    callback(null, true);
  }
});
module.exports = { initStorage, initUpload };
offers.js
// offers.js — router wiring for the "offers" upload endpoints.
const router = require('express').Router();
const auth = require('../../middleware/auth');
const mongoose = require('mongoose');
const Grid = require('gridfs-stream');
// The gridfs-stream collection and the GridFS bucket share this name.
const collectionName = 'offers';
const bucketName = 'offers';
const { initStorage, initUpload } = require('../../modules/multerModule');
const conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
// Init gfs
let gfs;
// gfs is only usable after the connection opens; until then it is undefined.
conn.once('open', () => {
// Init stream
gfs = Grid(conn.db);
gfs.collection(collectionName);
});
// NOTE(review): storage is built synchronously from the still-pending
// connection object, before the 'open' event above fires. The reported
// "TypeError: storage is not a function" most likely comes from
// multer-gridfs-storage rejecting this `db` value — confirm against the
// installed multer-gridfs-storage version (newer versions expect `url`
// or a promise resolving to the db).
const storage = initStorage(conn, bucketName);
const upload = initUpload(storage);
And I get TypeError: storage is not a function
I think that I could do that by writing something like this (correct me if I'm wrong):
module.exports = {
function1: function(params) {//do something},
function2: function(nextparams){//do something}
}
, but isn't there a way to do this using the arrow functions defined before module.exports, as in my first approach?

Related

Convert & download the file node.js express

Hello everyone,
Please, I need help understanding what I am doing wrong.
My problem is: I'm trying to create an app that converts from .xml to JSON, then from JSON to .xlsx, and finally downloads the file as an Excel sheet.
Anyway, the first part of the project was completed successfully,
but when I try to convert the JSON file (that I converted from .xml) to .xlsx, it does not work at all.
so the files are :
ControllerXML2Json.js
working successfully and i can see the json data
const { json } = require('body-parser')
const fs = require('fs')
const xml2js = require('xml2js')
// Holds the most recent conversion result as a pretty-printed JSON string.
let jsonString

// Converts src/data/dataset.xml to JSON and writes it to
// public/data/result.json.
// Returns the JSON string on success, or undefined on failure.
// Fix: the original never returned the result, so callers had no way to use
// or check the conversion outcome.
async function xml2json() {
  try {
    const xml = fs.readFileSync('src/data/dataset.xml')
    const result = await xml2js.parseStringPromise(xml, { mergeAttrs: true })
    jsonString = JSON.stringify(result, null, 4)
    fs.writeFileSync('./public/data/result.json', jsonString)
    return jsonString
  } catch (error) {
    // Best-effort: log and return undefined rather than crashing the route.
    console.log(error)
  }
}
module.exports = {
convert(req, res) {
let xmlFile = req.files.fileXML
xmlFile.mv('src/data/dataset.xml', function (err) {
if (err) {
return res.status(500).send(err)
} else {
xml2json()
}
})
res.redirect('/json')
}
}
ControllerJson2Xlsx.js
not working at all
//const { json } = require("body-parser");
const fs = require("fs");
const json2xlsx = require("json2xlsx");
const path = require("path");
//const fileName = "data.xlsx";
// Reads public/data/result.json and writes it out as an Excel workbook at
// public/data/data.xlsx. Errors are logged (best-effort, matching the rest
// of this controller).
// Fixes vs. original:
//  - `xlsx` was never in scope: the file required `json2xlsx`, which has no
//    utils/writeFile API. The SheetJS `xlsx` package is required locally here
//    so this function is self-contained.
//  - book_append_sheet takes (workbook, worksheet, name); the original passed
//    the worksheet first, and writeFile takes (workbook, path) — the original
//    had both argument orders reversed (its own commented-out line L126 shows
//    the correct call).
async function generateExcelFromJSON() {
  const xlsx = require("xlsx"); // SheetJS
  try {
    const data = JSON.parse(
      fs.readFileSync("public/data/result.json", {
        encoding: "utf8",
        flag: "r",
      })
    );
    const newWS = xlsx.utils.json_to_sheet(data);
    const newWB = xlsx.utils.book_new();
    xlsx.utils.book_append_sheet(newWB, newWS, "data");
    xlsx.writeFile(newWB, "./public/data/data.xlsx");
  } catch (error) {
    console.log(error);
  }
}
module.exports = {
download(req, res) {
let jsonFile = req.files.fileJSON;
jsonFile.mv("public/data/result.json", function (err) {
if (err) {
return res.status(500).send(err);
} else {
json2xlsx();
}
});
res.redirect("/xlsx");
},
};
route.js
const express = require('express')
const ControllerJson2Xlsx = require('./controllers/ControllerJson2Xlsx')
const ControllerXML2Json = require('./controllers/ControllerXML2Json')
const route = express.Router()
route.get('/', (req, res) => res.render('index'))
route.get('/json', (req, res) => res.render('json'))
route.post('/convert', ControllerXML2Json.convert);
route.get("/single", ControllerJson2Xlsx.download);
// route.post('/teste', (req, res) => {
// return res.send(data)
// })
module.exports = route

Firebase Cloud function to parse form data working incorrectly

I have an app which calls a cloud function endpoint:
import './App.css';
import React from 'react';
import axios from 'axios';
function App() {
const [file, setFile] = React.useState(null);
function fileSelected(e)
{
setFile(()=> e.target.files[0]);
}
function uploadFile()
{
console.log(file)
const fd = new FormData();
fd.append('image', file, file.name);
console.log(file);
console.log(file.name);
axios.post('https://us-central1-athelasapp.cloudfunctions.net/uploadFile', fd)
.then(res => {
console.log(res)
});
}
return (
<div className="App">
<input type="file" onChange={fileSelected}/>
<input type="submit" onClick={uploadFile}/>
</div>
);
}
export default App;
and the endpoint tries to parse it with Busboy however, it throws a 500 error. I have Busboy imported but it throws:
xhr.js:220 POST https://us-central1-athelasapp.cloudfunctions.net/uploadFile 500`
const functions = require("firebase-functions");
const express = require("express");
const cors = require("cors");
const app = express();
const Busboy = require("busboy");
const os = require("os");
const path = require("path");
const fs = require("fs");
// Service-account credentials for Cloud Storage.
const gcconfig = {
  projectId: "athelasapp",
  keyFilename: "athelasapp-firebase-adminsdk-yojnp-1e9141a009.json",
};
// Fix: the package name is "@google-cloud/storage"; "#google-cloud/storage"
// is not a valid npm specifier and makes this require throw at startup.
const {Storage} = require("@google-cloud/storage");
const gcs = new Storage(gcconfig);
app.use(cors({origin: "http://localhost:3000"}));
// // Create and Deploy Your First Cloud Functions
// https://firebase.google.com/docs/functions/write-firebase-functions
exports.uploadFile = functions.https.onRequest(app);
// Parses a multipart upload with Busboy, stages the file in the instance's
// tmp dir, then copies it to Cloud Storage. Responds exactly once.
// Fixes vs. original:
//  - busboy.on(...) returns the Busboy instance (an EventEmitter), not a
//    promise, so the chained .then()/.catch() threw
//    "busboy.on(...).then is not a function" — the 500 the client saw;
//  - a response was written inside the "file" handler, again after
//    busboy.end(), AND again in then/catch — three responses per request;
//  - the bucket.upload() promise is now handled inside the finish handler.
app.post("/", (req, res) => {
  if (req.method !== "POST") {
    return res.status(500).json({
      message: "Method Does Not Work",
    });
  }
  const busboy = new Busboy({headers: req.headers});
  let uploadData = null;
  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    const filepath = path.join(os.tmpdir(), filename);
    uploadData = {file: filepath, type: mimetype};
    // Stage to tmp; do NOT respond here — the request is still streaming.
    file.pipe(fs.createWriteStream(filepath));
  });
  busboy.on("finish", () => {
    if (uploadData === null) {
      return res.status(400).json({message: "No file was uploaded"});
    }
    const bucket = gcs.bucket("athelasapp.appspot.com");
    bucket.upload(uploadData.file, {
      uploadType: "media",
      metadata: {
        metadata: {
          contentType: uploadData.type,
        },
      },
    }).then(() => {
      res.status(200).json({
        message: "Method Works!",
      });
    }).catch((err) => {
      console.error(err);
      res.status(500).json({
        message: "Method Failed!",
      });
    });
  });
  // Cloud Functions exposes the already-buffered body as rawBody.
  busboy.end(req.rawBody);
});
I can't find any errors in my code or in how it's implemented. Could it be that I'm passing the wrong things in the request? I think it might have to do with Busboy's API naming.
Here's a working gist streaming directly instead of creating the temporary file: https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
// Streams a multipart upload straight into Cloud Storage (no temp file) and
// resolves with the collected non-file form fields once parsing completes.
// Fix: the original referenced an undefined `fileName` for the destination
// object, throwing a ReferenceError on every file — the name comes from
// fileInfo.filename (Busboy v1 "file" event signature).
function asyncBusboy(req, res) {
  return new Promise((resolve, reject) => {
    const storage = new Storage()
    const bucket = storage.bucket(process.env.BUCKET)
    const fields = []
    const busboy = Busboy({
      headers: req.headers,
      limits: {
        fileSize: 10 * 1024 * 1024
      }
    })
    // Collect plain form fields keyed by field name.
    busboy.on('field', (key, value) => {
      fields[key] = value
    })
    busboy.on('file', (name, file, fileInfo) => {
      const destFile = bucket.file(fileInfo.filename)
      const writeStream = destFile.createWriteStream({
        metadata: {
          contentType: fileInfo.mimeType,
          metadata: {
            originalFileName: fileInfo.filename
          }
        }
      })
      file.pipe(writeStream)
    })
    busboy.on('close', function () {
      return resolve({ fields })
    })
    // Cloud Functions pre-buffers the body as rawBody; otherwise stream it.
    if (req.rawBody) {
      busboy.end(req.rawBody)
    } else {
      req.pipe(busboy)
    }
  })
}

I'm trying to upload files with multer in node.js backend and react js in frontend but it doesn't work in back side with multer in public folder

The frontend with React.js looks fine — I saw other people doing the same thing I did — but I don't know where the problem is. Can anyone help me?
// React state for the upload form (component fragment; enclosing component
// not shown here).
const [cours, setCours] = useState([]);
const [description, setDescription] = useState("")
const [title, setTitle] = useState("")
// NOTE(review): this stores a ONE-ELEMENT ARRAY containing the FileList, not
// the File objects themselves; appending that to FormData below stringifies
// it instead of sending files. Consider setCours([...e.target.files]) and
// appending each File individually (see the commented loops below).
const coursHandle = (e) => { setCours([e.target.files]) }
// Builds the multipart body and posts it to the backend.
const onsubmit = async (e) => {
e.preventDefault();
const formData = new FormData();
formData.append("description", description);
formData.append("title", title);
// cours.forEach((elem) => { formData.append("cours", elem) });
formData.append("cours", cours)
// for (let i = 0; i < cours.length; i++) {
// formData.append("cours", cours[i])
// }
await axios.post("http://localhost:5000/upload", formData)
.then((res) => console.log("successfully file post", res)).catch((err) =>
console.log("error with file post", err))
}
and backend with multer is here this code is in my app.js
app.use(express.static(path.join(__dirname, 'public')));
and the public folder is the same place as app.js
const multer = require("multer");
const path = require("path");
const MIME_TYPES = {
"file/pdf": "pdf",
"file/docx": "docx",
"file/txt": "txt",
"file/png": "png",
"file/jpeg": "jpg",
"file/jpg": "jpg",
}
const fileStorage = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, "public");
},
filename: (req, file, cb) => {
const nameObject = path.parse(file.originalname);
// const nameObject = file.originalname.split(' ').join('_');
const extension = MIME_TYPES[file.mimetype];
cb(null, nameObject.name.split(" ").join("_") + Date.now() + "." + extension);
}
})
module.exports = { multerUpload: multer({ storage: fileStorage }).single("file") }
In last line
module.exports = { multerUpload: multer({ storage: fileStorage }).single("file")
the argument passed to single is "file", but in your frontend form there is no input named "file" — maybe you have to change the argument of the single function from "file" to "cours".
I found the answer for my question,
formData.append("cours", cours)
Here, the key name I am trying to send is "cours", and:
module.exports = { multerUpload: multer({ storage: fileStorage }).single("file")
Here the key name for the single function is "file"! That was the first problem, and the other one was this:
I had forgotten to add this attribute to my form tag!
encType="multipart/form-data"
And finally my multer js is :
// Disk storage for course uploads: files are written to public/files with
// spaces in the base name replaced by underscores and a timestamp appended
// before the original extension.
const fileStorage = multer.diskStorage({
  destination(req, file, cb) {
    cb(null, "public/files");
  },
  filename(req, file, cb) {
    console.log("filename :", file);
    const { name, ext } = path.parse(file.originalname);
    const safeName = name.split(" ").join("_");
    cb(null, safeName + Date.now() + ext);
  },
});
// Accepts any number of files under the "cours" field (matches the frontend).
module.exports = { multerUpload: multer({ storage: fileStorage }).array("cours") };

Overwriting files in Node server

So I have a server that temporarily saved the files in it's memory, before I upload them to the database. Here's my code:
// Streams an uploaded file from its temp path into GridFS, then stores the
// resulting GridFS file id as the account's employer logo and redirects.
uploadImage(file, uid, res) {
var fs = require('fs');
// Best-effort connect; connection errors are only logged.
mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
const writeStream = gfs.createWriteStream({
filename: file.filename,
});
// Copy the temp file into GridFS.
fs.createReadStream(file.path).pipe(writeStream);
// gridfs-stream emits the stored file document on 'close'; its _id becomes
// the account's logo reference (this `file` shadows the outer parameter).
writeStream.on('close', file => {
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},
I am trying to optimize the images before uploading them to the database, like this:
async uploadImage(file, uid, res) {
const imagemin = require('imagemin');
const imageminJpegtran = require('imagemin-jpegtran');
const imageminPngquant = require('imagemin-pngquant');
console.log(1);
// const newFilePath = `${file.path}optimized`;
const newFile = await imagemin([file.path], file.path, {
plugins: [
imageminJpegtran(),
imageminPngquant({quality: '65-80'})
]
});
// newFile.path = newFilePath;
console.log(2);
console.log(file);
console.log(newFile);
var fs = require('fs');
await mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
const writeStream = gfs.createWriteStream({
filename: newFile.filename,
});
fs.createReadStream(newFile.path).pipe(writeStream);
writeStream.on('close', file => {
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},
But this tells me EEXIST: file already exists, mkdir '/home/alex/Documents/Projects/ontario-job-portal/public/images/logos/b80872b65d18d09bb498abcabe2f3f94', which is true. I'm trying to overwrite the previous image.
How can I make it overwrite the file though?
Creating a new file results in a permission denied error.
Edit. IMPORTANT
Turns out, that the object returned by the imagemin function is a little different from the previous one. Here's my working code:
async uploadImage(file, uid, res) {
const imagemin = require('imagemin');
const imageminJpegtran = require('imagemin-jpegtran');
const imageminPngquant = require('imagemin-pngquant');
console.log(1);
const path = require("path");
const newFilePath = path.dirname(file.path);
const newFile = await imagemin([file.path], path.dirname(file.path), {
plugins: [
imageminJpegtran(),
imageminPngquant({quality: '65-80'})
]
});
newFile.path = newFilePath;
newFile.filename = newFile[0].path.replace(/public\/images\/logos\//, '');
console.log(newFile.filename);
var fs = require('fs');
await mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
console.log(2);
const writeStream = gfs.createWriteStream({
filename: newFile[0].filename,
});
console.log(3);
fs.createReadStream(newFile[0].path).pipe(writeStream);
console.log(4);
writeStream.on('close', file => {
console.log(5);
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},
It looks like you are providing the path of an existing file to imagemin where a directory is required. To extract the directory from the path, use path.dirname(file.path):
const path = require("path");
const newFile = await imagemin([file.path], path.dirname(file.path), {
...
Note that this won't overwrite the existing file. It doesn't appear that imagemin supports that directly. You could do it yourself manually using fs if you wanted, but I'm not sure why you would. It seems like you are interested in using these as temporary files. You may want to add some code to delete the files after they have been written to mongo.

Google Cloud Functions - Upload to Google Cloud Storage via HTTP

I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then upload to Google Cloud Storage.
I keep receiving a ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png' error when triggering the function.
The error seems to occur within the finish callback function when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Example code
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
// Fix: the package name is "@google-cloud/storage"; "#google-cloud/storage"
// is not a valid npm specifier and makes this require throw at load time.
const Storage = require('@google-cloud/storage');
const moment = require('moment');
const _ = require('lodash');
const projectId = 'xxx';
const bucketName = 'xxx';
const storage = new Storage({
  projectId: projectId,
});
exports.uploadFile = (req, res) => {
if (req.method === 'POST') {
const busboy = new Busboy({
headers: req.headers
});
const uploads = []
const tmpdir = os.tmpdir();
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
const filepath = path.join(tmpdir, filename)
var obj = {
path: filepath,
name: filename
}
uploads.push(obj);
var writeStream = fs.createWriteStream(obj.path);
file.pipe(writeStream);
});
busboy.on('finish', () => {
_.forEach(uploads, function (file) {
storage
.bucket(bucketName)
.upload(file.path, {
name: moment().format('/YYYY/MM/DD/x') + '-' + file.name
})
.then(() => {
console.log(`${file.name} uploaded to ${bucketName}.`);
})
.catch(err => {
console.error('ERROR:', err);
});
fs.unlinkSync(file.path);
})
res.end()
});
busboy.end(req.rawBody);
} else {
res.status(405).end();
}
}
Solved this with a stream instead of a temporary file. Only handles a single file at the moment though.
https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
// Streams a multipart upload straight into Cloud Storage (no temp file) and
// resolves with the collected non-file form fields once parsing completes.
// Fix: the original referenced an undefined `fileName` for the destination
// object, throwing a ReferenceError on every file — the name comes from
// fileInfo.filename (Busboy v1 "file" event signature).
function asyncBusboy(req, res) {
  return new Promise((resolve, reject) => {
    const storage = new Storage()
    const bucket = storage.bucket(process.env.BUCKET)
    const fields = []
    const busboy = Busboy({
      headers: req.headers,
      limits: {
        fileSize: 10 * 1024 * 1024
      }
    })
    // Collect plain form fields keyed by field name.
    busboy.on('field', (key, value) => {
      fields[key] = value
    })
    busboy.on('file', (name, file, fileInfo) => {
      const destFile = bucket.file(fileInfo.filename)
      const writeStream = destFile.createWriteStream({
        metadata: {
          contentType: fileInfo.mimeType,
          metadata: {
            originalFileName: fileInfo.filename
          }
        }
      })
      file.pipe(writeStream)
    })
    busboy.on('close', function () {
      return resolve({ fields })
    })
    // Cloud Functions pre-buffers the body as rawBody; otherwise stream it.
    if (req.rawBody) {
      busboy.end(req.rawBody)
    } else {
      req.pipe(busboy)
    }
  })
}

Categories

Resources