Overwriting files in Node server - javascript

So I have a server that temporarily saves the files in its memory, before I upload them to the database. Here's my code:
uploadImage(file, uid, res) {
var fs = require('fs');
mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
const writeStream = gfs.createWriteStream({
filename: file.filename,
});
fs.createReadStream(file.path).pipe(writeStream);
writeStream.on('close', file => {
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},
I am trying to optimize the images before uploading them to the database, like this:
async uploadImage(file, uid, res) {
const imagemin = require('imagemin');
const imageminJpegtran = require('imagemin-jpegtran');
const imageminPngquant = require('imagemin-pngquant');
console.log(1);
// const newFilePath = `${file.path}optimized`;
const newFile = await imagemin([file.path], file.path, {
plugins: [
imageminJpegtran(),
imageminPngquant({quality: '65-80'})
]
});
// newFile.path = newFilePath;
console.log(2);
console.log(file);
console.log(newFile);
var fs = require('fs');
await mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
const writeStream = gfs.createWriteStream({
filename: newFile.filename,
});
fs.createReadStream(newFile.path).pipe(writeStream);
writeStream.on('close', file => {
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},
But this tells me EEXIST: file already exists, mkdir '/home/alex/Documents/Projects/ontario-job-portal/public/images/logos/b80872b65d18d09bb498abcabe2f3f94', which is true. I'm trying to overwrite the previous image.
How can I make it overwrite the file though?
Creating a new file results in a permission denied error.
Edit. IMPORTANT
Turns out, that the object returned by the imagemin function is a little different from the previous one. Here's my working code:
async uploadImage(file, uid, res) {
const imagemin = require('imagemin');
const imageminJpegtran = require('imagemin-jpegtran');
const imageminPngquant = require('imagemin-pngquant');
console.log(1);
const path = require("path");
const newFilePath = path.dirname(file.path);
const newFile = await imagemin([file.path], path.dirname(file.path), {
plugins: [
imageminJpegtran(),
imageminPngquant({quality: '65-80'})
]
});
newFile.path = newFilePath;
newFile.filename = newFile[0].path.replace(/public\/images\/logos\//, '');
console.log(newFile.filename);
var fs = require('fs');
await mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
console.log(2);
const writeStream = gfs.createWriteStream({
filename: newFile[0].filename,
});
console.log(3);
fs.createReadStream(newFile[0].path).pipe(writeStream);
console.log(4);
writeStream.on('close', file => {
console.log(5);
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},

It looks like you are providing the path of an existing file to imagemin where a directory is required. To extract the directory from the path, use path.dirname(file.path):
const path = require("path");
const newFile = await imagemin([file.path], path.dirname(file.path), {
...
Note that this won't overwrite the existing file. It doesn't appear that imagemin supports that directly. You could do it yourself manually using fs if you wanted, but I'm not sure why you would. It seems like you are interested in using these as temporary files. You may want to add some code to delete the files after they have been written to mongo.

Related

How can I connect to GridFS bucket from my controller

I'm trying to use a gridfs bucket to store uploaded files. I have the upload sorted, but downloading is a bit more tricky.
To retrieve files I need to access the bucket instance, which I create in the database connecting function:
/**
 * Opens a dedicated mongoose connection and returns a GridFSBucket bound to
 * it.
 *
 * Fix: the original registered the bucket creation inside `conn.once("open")`
 * but executed `return gfs` immediately, before that event could ever fire —
 * so the function always resolved to undefined. We now await the 'open'
 * event before touching conn.db.
 */
const connectDB = async () => {
  try {
    // createConnection returns immediately; the socket opens asynchronously.
    const conn = mongoose.createConnection(process.env.MONGO_URI, {
      useNewUrlParser: true,
      useUnifiedTopology: true,
    });
    // Wait until the underlying db handle actually exists.
    await new Promise((resolve, reject) => {
      conn.once("open", resolve);
      conn.once("error", reject);
    });
    // init gfs — safe now: conn.db is populated after 'open'.
    const gfs = new mongoose.mongo.GridFSBucket(conn.db, {
      bucketName: "assets",
    });
    return gfs;
  } catch (err) {
    console.error(err.message);
    process.exit(1);
  }
};
I think I'd end up with multiple connections and buckets if I called it again from my controller, is this the case?
What's the best way to access the gfs object from my controller?
At the start of application, call mongoose.connect and use mongoose.connection anywhere else, it will use the default connection automatically. For example:
index.js
// index.js — open the default mongoose connection once at startup; every
// other module can then use mongoose.connection without reconnecting.
const mongoose = require("mongoose");
mongoose.set("strictQuery", false);
mongoose.connect("mongodb://127.0.0.1:27017/test");
bucket.js
// bucket.js — shared GridFS bucket built on the default mongoose connection.
const mongoose = require("mongoose");
const { GridFSBucket } = require("mongodb");
// NOTE(review): GridFSBucket's documented constructor argument is a mongodb
// Db handle (i.e. mongoose.connection.db), not the mongoose Connection
// object itself — confirm this works with the driver version in use.
const bucket = new GridFSBucket(mongoose.connection); // autouse default connection
module.exports = bucket;
controller.js
const bucket = require('./bucket')
// Upload a small test document through the shared GridFS bucket and
// acknowledge once the upload stream has flushed everything to the database.
router.post("/", (req, res) => {
  const uploadStream = bucket.openUploadStream("test.txt");
  uploadStream.on("finish", () => {
    res.send("done");
  });
  uploadStream.write("test");
  uploadStream.end();
});

Listing all the declared slash command to a json file Discord js

I'm building a discord bot using discord.js. All of my source code is from the official discord.js guide website, and I'm wondering how I could list all declared slash commands to a JSON file commands.json.
Here is my code:
deploy_commands.js:
// deploy_commands.js — collect every command module's builder JSON and
// register the whole batch as guild slash commands.
// Fixes: npm scopes use '@', so '#discordjs/rest' was a garbled
// '@discordjs/rest'; the unused SlashCommandBuilder import was dropped
// (each command module builds its own).
const { REST } = require('@discordjs/rest');
const { Routes } = require('discord-api-types/v9');
const { clientId, guildId, token } = require('./config.json');
const fs = require('node:fs');
const path = require('node:path');

const commands = [];
const commandsPath = path.join(__dirname, 'commands');
const commandFiles = fs.readdirSync(commandsPath).filter(file => file.endsWith('.js'));
for (const file of commandFiles) {
  const filePath = path.join(commandsPath, file);
  const command = require(filePath);
  // Each command module exposes a SlashCommandBuilder under `data`.
  commands.push(command.data.toJSON());
}

const rest = new REST({ version: '9' }).setToken(token);
rest.put(Routes.applicationGuildCommands(clientId, guildId), { body: commands })
  .then(() => console.log('Successfully registered application commands.'))
  .catch(console.error);
index.js:
// index.js — bot entry point: load the command modules into a Collection,
// dispatch incoming slash-command interactions to them, and log in.
const { Client, Collection, Intents } = require("discord.js");
const client = new Client({intents: [Intents.FLAGS.GUILDS]});
// One require of config.json is enough; the previous separate `config`
// binding was never used.
const { guildId, clientId, token } = require('./config.json');
const fs = require('node:fs');
const path = require('node:path');

client.commands = new Collection();
const commandsPath = path.join(__dirname, 'commands');
const commandFiles = fs.readdirSync(commandsPath).filter(file => file.endsWith('.js'));
for (const file of commandFiles) {
  const filePath = path.join(commandsPath, file);
  const command = require(filePath);
  // Keyed by the command's declared name so interactionCreate can look it up.
  client.commands.set(command.data.name, command);
}

client.once('ready', () => {
  console.log(`user : ${client.user.tag}\nguildid : ${guildId}\nclientid : ${clientId}`);
});

client.on('interactionCreate', async interaction => {
  if (!interaction.isCommand()) return;
  const command = client.commands.get(interaction.commandName);
  if (!command) return;
  try {
    await command.execute(interaction);
  } catch (error) {
    console.error(error);
    await interaction.reply({content: 'Sorry, there was a problem while executing this command, maybe try again later?', ephemeral: true});
  }
});

client.login(token);
deploy_commands.js is a file for deploying commands, and what I want to do is to save all the declared slash commands and transfer all of them to a JSON file.
I personally suggest using map() to build the array where you store every command inside deploy_commands.js.
Following you can find a solution that worked for me:
// Serialize every registered builder to plain JSON and persist the batch to
// commands.json. (The original snippet was missing the closing ')' of the
// fs.writeFile call, and `[...commandBuilders...]` was placeholder pseudo-
// syntax, not valid JavaScript.)
const fs = require('fs');
// `commandBuilders` stands for your array of SlashCommandBuilder instances.
const commands = commandBuilders.map(command => command.toJSON());
const commandsToString = JSON.stringify(commands);
fs.writeFile('commands.json', commandsToString, (e) => {
  if (e) console.log('ERROR: ' + e);
});

Delete specific rows from a text file in NodeJS

I am trying to delete a specific row from a text file using NodeJs, but I am having trouble to do so. Can anyone please help me?
This is the content inside the text file and I am trying to delete example3:example3 from the text file.
example1:example1
example2:example2
example3:example3
example4:example4
This is the code that should work but doesn't
const fs = require('fs')
const path = require('path')
const filePath = path.join(__dirname, '../data/test.txt')
// Assumes CRLF line endings; a file saved with plain '\n' will not split.
const fileData = fs.readFileSync(filePath, 'utf-8').split('\r\n')
// 1-based line number of the row to remove (example3:example3).
const position = 3
// This only edits the in-memory array — nothing is ever written back to
// disk, which is why the file appears unchanged.
fileData.splice(position - 1, 1)
You need to save the file with fs.writeFile or fs.writeFileSync
Synchronous:
// Synchronous version: read the file, drop line `position`, write it back.
const fs = require('fs')
const path = require('path')
// Two `const filePath` declarations in one scope are a SyntaxError; keep the
// relative piece under its own name.
const relativePath = '../data/test.txt'
const filePath = path.join(__dirname, relativePath)
const fileData = fs.readFileSync(filePath, 'utf-8').split('\r\n')
const position = 3
fileData.splice(position - 1, 1)
const dataString = fileData.join('\r\n')
fs.writeFileSync(filePath, dataString)
Asynchronous:
// Asynchronous version: same edit using the callback API.
const fs = require('fs')
const path = require('path')
// was a second, conflicting `const filePath` declaration
const relativePath = '../data/test.txt'
const filePath = path.join(__dirname, relativePath)
// Pass 'utf-8' so `data` arrives as a string rather than a Buffer (the
// original also had the typo `const fileData a data.split(...)`).
fs.readFile(filePath, 'utf-8', (err, data) => {
  if (err) throw err
  const fileData = data.split('\r\n')
  const position = 3
  fileData.splice(position - 1, 1)
  const dataString = fileData.join('\r\n')
  fs.writeFile(filePath, dataString, (err) => {
    if (err) throw err
    else console.log('file saved')
  })
})

Passing functions with require

I want to pass functions with require. Here's the code:
multerModule.js
// multerModule.js — factories for a GridFS-backed multer storage engine and
// an image-only multer uploader.
const path = require('path');
const multer = require('multer');
const crypto = require('crypto');
// multer-gridfs-storage exports the class as a named property in current
// versions; requiring the bare module object and calling `new` on it is the
// usual cause of "storage is not a function"-style TypeErrors.
// TODO confirm against the installed package version.
const { GridFsStorage } = require('multer-gridfs-storage');

// Create storage engine bound to `conn`, filing each upload under
// `bucketName` with a random 16-byte hex filename (original extension kept).
const initStorage = (conn, bucketName) => new GridFsStorage({
  db: conn,
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: filename,
          bucketName: bucketName
        };
        resolve(fileInfo);
      });
    });
  }
});

// Wrap a storage engine in a multer instance that accepts images only
// (extension check; rejects everything else with an Error).
const initUpload = (storage) => multer({
  storage: storage,
  fileFilter: function (req, file, callback) {
    const ext = path.extname(file.originalname);
    if (ext !== '.png' && ext !== '.jpg' && ext !== '.gif' && ext !== '.jpeg') {
      return callback(new Error('Only images are allowed'))
    }
    callback(null, true)
  }
});

module.exports = { initStorage, initUpload };
offers.js
const router = require('express').Router();
const auth = require('../../middleware/auth');
const mongoose = require('mongoose');
const Grid = require('gridfs-stream');
const collectionName = 'offers';
const bucketName = 'offers';
const { initStorage, initUpload } = require('../../modules/multerModule');
// Reuses the application's default mongoose connection.
const conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
// Init gfs
// `gfs` stays undefined until the connection's 'open' event fires; any
// route handler that runs before then will see undefined.
let gfs;
conn.once('open', () => {
// Init stream
gfs = Grid(conn.db);
gfs.collection(collectionName);
});
// NOTE(review): the storage engine is created immediately, possibly before
// the connection is open — confirm the installed multer-gridfs-storage
// version tolerates a still-connecting `db` handle.
const storage = initStorage(conn, bucketName);
const upload = initUpload(storage);
And I get TypeError: storage is not a function
I think that I could do that by writing something like (correct me if I'm wrong):
module.exports = {
function1: function(params) {//do something},
function2: function(nextparams){//do something}
}
But isn't there a way to do it the first way, i.e. through the arrow functions defined before the module.exports?

Google Cloud Functions - Upload to Google Cloud Storage via HTTP

I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then upload to Google Cloud Storage.
I keep receiving a ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png' error when triggering the function.
The error seems to occur within the finish callback function when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Example code
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
// npm scopes use '@', so '#google-cloud/storage' was a garbled package name.
// NOTE(review): from v2 the package exports { Storage }; the default-export
// form shown here matches pre-2.x — confirm the installed version.
const Storage = require('@google-cloud/storage');
const moment = require('moment');
const _ = require('lodash');

const projectId = 'xxx';
const bucketName = 'xxx';
// One client per instance; reused across invocations.
const storage = new Storage({
  projectId: projectId,
});
exports.uploadFile = (req, res) => {
if (req.method === 'POST') {
const busboy = new Busboy({
headers: req.headers
});
const uploads = []
const tmpdir = os.tmpdir();
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
const filepath = path.join(tmpdir, filename)
var obj = {
path: filepath,
name: filename
}
uploads.push(obj);
var writeStream = fs.createWriteStream(obj.path);
file.pipe(writeStream);
});
busboy.on('finish', () => {
_.forEach(uploads, function (file) {
storage
.bucket(bucketName)
.upload(file.path, {
name: moment().format('/YYYY/MM/DD/x') + '-' + file.name
})
.then(() => {
console.log(`${file.name} uploaded to ${bucketName}.`);
})
.catch(err => {
console.error('ERROR:', err);
});
fs.unlinkSync(file.path);
})
res.end()
});
busboy.end(req.rawBody);
} else {
res.status(405).end();
}
}
Solved this with a stream instead of a temporary file. Only handles a single file at the moment though.
https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
/**
 * Parse a multipart request with Busboy, streaming each file straight into
 * the Cloud Storage bucket (no temp files). Resolves with the collected
 * non-file form fields.
 *
 * Fixes vs. the original:
 *  - `fileName` was an undeclared identifier (ReferenceError on the first
 *    file); the object name now comes from fileInfo.filename;
 *  - `fields` is a plain object, since it is used as a key/value map, not
 *    an array.
 */
function asyncBusboy(req, res) {
  return new Promise((resolve, reject) => {
    const storage = new Storage()
    const bucket = storage.bucket(process.env.BUCKET)
    const fields = {}
    const busboy = Busboy({
      headers: req.headers,
      limits: {
        fileSize: 10 * 1024 * 1024
      }
    })
    busboy.on('field', (key, value) => {
      fields[key] = value
    })
    busboy.on('file', (name, file, fileInfo) => {
      const { mimeType } = fileInfo
      const destFile = bucket.file(fileInfo.filename)
      const writeStream = destFile.createWriteStream({
        metadata: {
          contentType: mimeType,
          metadata: {
            originalFileName: fileInfo.filename
          }
        }
      })
      file.pipe(writeStream)
    })
    busboy.on('close', function () {
      return resolve({ fields })
    })
    if (req.rawBody) {
      busboy.end(req.rawBody)
    } else {
      req.pipe(busboy)
    }
  })
}

Categories

Resources