Delete specific rows from a text file in NodeJS - javascript

I am trying to delete a specific row from a text file using Node.js, but I am having trouble doing so. Can anyone please help me?
This is the content inside the text file and I am trying to delete example3:example3 from the text file.
example1:example1
example2:example2
example3:example3
example4:example4
This is the code that should work but doesn't
const fs = require('fs')
const path = require('path')
const filePath = path.join(__dirname, '../data/test.txt')
// Read the whole file and split into lines (Windows line endings assumed).
const fileData = fs.readFileSync(filePath, 'utf-8').split('\r\n')
// 1-based line number of the row to delete (example3:example3).
const position = 3
// Removes the line from the in-memory array only — nothing is ever
// written back to disk, which is why this code "doesn't work".
fileData.splice(position - 1, 1)

You need to save the file with fs.writeFile or fs.writeFileSync
Synchronous:
// Synchronous version: read the file, drop the target line, write it back.
const fs = require('fs')
const path = require('path')
// Build the absolute path in ONE declaration — the original declared
// `filePath` twice with `const`, which is a SyntaxError.
const filePath = path.join(__dirname, '../data/test.txt')
// Read and split into lines (Windows line endings assumed).
const fileData = fs.readFileSync(filePath, 'utf-8').split('\r\n')
// 1-based line number of the row to delete.
const position = 3
fileData.splice(position - 1, 1)
const dataString = fileData.join('\r\n')
fs.writeFileSync(filePath, dataString)
Asynchronous:
// Asynchronous version: same logic using the callback API.
const fs = require('fs')
const path = require('path')
// Single declaration — the original's duplicate `const filePath` was a SyntaxError.
const filePath = path.join(__dirname, '../data/test.txt')
// Pass the encoding so `data` arrives as a string; without it readFile
// yields a Buffer, which has no .split method.
fs.readFile(filePath, 'utf-8', (err, data) => {
  if (err) throw err
  // The original line was garbled ("const fileData a data.split(...)").
  const fileData = data.split('\r\n')
  // 1-based line number of the row to delete.
  const position = 3
  fileData.splice(position - 1, 1)
  const dataString = fileData.join('\r\n')
  fs.writeFile(filePath, dataString, (err) => {
    if (err) throw err
    else console.log('file saved')
  })
})

Related

How to use createCipheriv for large files (Eg:mp4) in crypto/cryptoJs

I have created code for compressing and encrypting, but I have now learned that createCipher is deprecated, so I would like to replace it with createCipheriv. The code for encryption:
/*Importing necessary modules*/
const fs = require('fs');
const zlib = require('zlib');
const path = require('path');
const crypto = require('crypto');
/*driver program*/
const getCipherKey = require('./getCipherKey');
// Gzip-compresses `file`, encrypts the compressed stream, and writes
// the result to "<file>.enc".
// NOTE(review): crypto.createCipher is deprecated — it derives key+IV
// internally from the password; the replacement createCipheriv needs an
// explicit 16-byte IV (see the commented-out randomBytes line below).
function Encrypt({ file, password }){
// const initVect = crypto.randomBytes(16);
const CIPHER_KEY = getCipherKey(password);
const readStream = fs.createReadStream(file);
const zip = zlib.createGzip();
const cipher = crypto.createCipher('aes256', CIPHER_KEY);
const writeStream =fs.createWriteStream(path.join(file + ".enc"));
readStream
.pipe(zip)
.pipe(cipher)
.pipe(writeStream);
}
Encrypt({ file: './video.mp4', password: 'dogzrgr8' });
for generating key:
const crypto = require('crypto-js');
// Derives a cipher key by SHA-256 hashing the password.
// Returns the digest as a hex string.
//
// Fixes two defects in the original:
//  - `value=getCipherKey(...)` created an implicit global and ran the hash
//    at import time for no reason;
//  - a single module-level hasher was shared across calls, so any second
//    call would .update() an already-finalized hash and corrupt the digest.
function getCipherKey(password) {
  // Fresh hasher per call keeps the function pure and safely reusable.
  const hash = crypto.algo.SHA256.create().update(password).finalize();
  return hash.toString(crypto.enc.Hex);
}
module.exports = getCipherKey;
for decryption:
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
const getCipherKey = require('./getCipherKey');
// Decrypts "<file>" (produced by Encrypt) and gunzips it into "<file>.unenc".
// NOTE(review): crypto.createDecipher is deprecated, mirroring the
// deprecated createCipher used for encryption above.
function decrypt({ file, password }) {
// First, get the initialization vector from the file.
// Once we’ve got the initialization vector, we can decrypt the file.
const cipherKey = getCipherKey(password);
const readStream = fs.createReadStream(file);
const decipher = crypto.createDecipher('aes256', cipherKey);
const unzip = zlib.createUnzip();
const writeStream = fs.createWriteStream(file + '.unenc');
readStream
.pipe(decipher)
.pipe(unzip)
.pipe(writeStream);
}
decrypt({ file: './video.mp4.enc', password: 'dogzrgr8' });
I tried referring to multiple sites like link1 and link2, but it returns an error as follows:
internal/crypto/cipher:116 this[kHandle].initiv(cipher, credential,
iv, authTagLength); ^ Error: Unknown cipher
You need to provide an IV (initialization vector) as the third argument to crypto.createCipheriv.
// Gzip + encrypt `file` to "<file>.enc", writing the random IV as the
// first 16 bytes so decryption can recover it.
function Encrypt({ file, password }){
  // getCipherKey returns a 64-character hex STRING; createCipheriv would
  // treat it as 64 UTF-8 bytes and throw "Invalid key length" — aes256
  // (aes-256-cbc) requires exactly 32 key bytes, so decode the hex first.
  const CIPHER_KEY = Buffer.from(getCipherKey(password), 'hex');
  const iv = crypto.randomBytes(16);
  const readStream = fs.createReadStream(file);
  const zip = zlib.createGzip();
  const cipher = crypto.createCipheriv('aes256', CIPHER_KEY, iv);
  const writeStream = fs.createWriteStream(path.join(file + ".enc"));
  // Prepend the IV; it is not secret, but it must be available to decrypt.
  writeStream.write(iv);
  readStream
    .pipe(zip)
    .pipe(cipher)
    .pipe(writeStream);
}
To decrypt the file, you need to read the IV from the beginning of the encrypted file and pass it to crypto.createDecipheriv.
// Reads the 16-byte IV from the head of "<file>", then decrypts and
// gunzips the remainder into "<file>.unenc".
function decrypt({ file, password }) {
  // getCipherKey returns a hex STRING; aes256 needs the 32 raw key bytes,
  // so decode it (otherwise createDecipheriv throws "Invalid key length").
  const cipherKey = Buffer.from(getCipherKey(password), 'hex');
  const readStream = fs.createReadStream(file);
  const writeStream = fs.createWriteStream(file + '.unenc');
  readStream.once('readable', () => {
    // The first 16 bytes of the encrypted file are the IV that the
    // encryption step wrote before piping the ciphertext.
    const iv = readStream.read(16);
    const decipher = crypto.createDecipheriv('aes256', cipherKey, iv);
    const unzip = zlib.createGunzip();
    readStream
      .pipe(decipher)
      .pipe(unzip)
      .pipe(writeStream);
  });
}

Listing all the declared slash command to a json file Discord js

I'm building a discord bot using discord.js. All of my source code is from the official discord.js guide website, and I'm wondering how I could list all declared slash commands to a JSON file commands.json.
Here is my code:
deploy_commands.js:
// '#discordjs/...' is not a valid npm package name — the discord.js
// helper packages are published under the '@discordjs' scope.
const { SlashCommandBuilder } = require('@discordjs/builders');
const { REST } = require('@discordjs/rest');
const { Routes } = require('discord-api-types/v9');
const { clientId, guildId, token } = require('./config.json');
const fs = require('node:fs');
const path = require('node:path');

// Collect the JSON payload of every command module in ./commands.
const commands = [];
const commandsPath = path.join(__dirname, 'commands');
const commandFiles = fs.readdirSync(commandsPath).filter(file => file.endsWith('.js'));
for (const file of commandFiles) {
  const filePath = path.join(commandsPath, file);
  const command = require(filePath);
  // Each command module exposes a builder on `data`; toJSON() yields the
  // payload the Discord API expects.
  commands.push(command.data.toJSON());
}

// Register all commands for the configured guild in one PUT request.
const rest = new REST({ version: '9' }).setToken(token);
rest.put(Routes.applicationGuildCommands(clientId, guildId), { body: commands })
  .then(() => console.log('Successfully registered application commands.'))
  .catch(console.error);
index.js:
const { Client, Collection, Intents } = require("discord.js");
const client = new Client({intents: [Intents.FLAGS.GUILDS]});
const config = require("./config.json");
const { guildId, clientId, token } = require('./config.json');
const fs = require('node:fs');
const path = require('node:path');
// Map of slash-command name -> command module, filled from ./commands below.
client.commands = new Collection();
const commandsPath = path.join(__dirname, 'commands');
const commandFiles = fs.readdirSync(commandsPath).filter(file => file.endsWith('.js'));
for (const file of commandFiles) {
const filePath = path.join(commandsPath, file);
const command = require(filePath);
// Each module is expected to export { data, execute }; data.name is the
// lookup key used when an interaction arrives.
client.commands.set(command.data.name, command);
}
// Log identifying information once the gateway connection is ready.
client.once('ready', () => {
console.log(`user : ${client.user.tag}\nguildid : ${guildId}\nclientid : ${clientId}`);
});
// Dispatch each slash-command interaction to its registered module.
client.on('interactionCreate', async interaction => {
if (!interaction.isCommand()) return;
const command = client.commands.get(interaction.commandName);
if (!command) return;
try {
await command.execute(interaction);
} catch (error) {
// Surface a generic failure message to the user; details go to stderr.
console.error(error);
await interaction.reply({content: 'Sorry, there was a problem while executing this command, maybe try again later?', ephemeral: true});
}
});
client.login(token);
deploy_commands.js is a file for deploying commands, and what I want to do is to save all the declared slash commands and transfer all of them to a JSON file.
I personally suggest you the usage of map() to manage the array where you store every command inside deploy_commands.js.
Following you can find a solution that worked for me:
const fs = require('fs');
const commands = [...commandBuilders...].map(command => command.toJSON());
const commandsToString = JSON.stringify(commands);
fs.writeFile('commands.json', commandsToString, (e, res) =>{
if (e) console.log('ERROR: ' + e);
}

Can't upload files to Node.js

Can't upload files to the server. I've used npm install express-fileupload and also did the var fileUpload = require('express-fileupload') and app.use(fileUpload()). And while calling
router.post('/add-products',(req,res)=>{
console.log(req.body);
console.log(req.files.image);
It says cannot read image of null.
I can give you a working full example.
Project structure:
- storage (empty folder)
- routes
-> upload.route.js
- controllers
-> upload.controller.js
index.js
index.js
// Minimal Express entry point: mount the upload routes and start listening.
const express = require('express');

const app = express();
const uploadRoute = require('./routes/upload.route');
app.use('/', uploadRoute);

const port = 8000;
app.listen(port);
console.log(`API listens localhost:${port}`);
This is your upload.route.js
const express = require('express');
const router = express.Router();
// The controller exports its handler as `upload` (exports.upload = ...),
// so the original destructuring of `uploadController` yielded undefined
// and the route had no usable handler — import the correct name.
const { upload } = require('../controllers/upload.controller');
router.use('/media/upload', upload);
module.exports = router;
This is upload.controller.js
const formidable = require('formidable');
const path = require('path');
exports.upload = async (req, res) => {
try {
// Receive the media and store it
let [uploadPath, filename] = await processUpload(req);
return res
.status(200)
.send({
success: 1,
message: "File uploaded",
filename,
uploadPath
});
} catch (error) {
return res
.status(400)
.send({
success: 0,
message: "Ops! Something went wrong",
errorObject: error.message
});
}
}
// Parses the multipart request with formidable and stores the file under
// ../storage. Resolves with [storedPath, filename].
function processUpload(req) {
  return new Promise((resolve, reject) => {
    try {
      const uploadDir = __dirname + `/../storage`;
      // We used helper formidable package
      const form = new formidable.IncomingForm();
      form.multiples = true;
      form.keepExtensions = true;
      // Upload path
      form.uploadDir = uploadDir;
      let result;
      form.on('fileBegin', function (name, file) {
        if (!file.type) reject(new Error("No media specified!"));
        const fileExt = path.extname(file.name);
        const filename = "test" + fileExt;
        file.path = path.join(uploadDir, filename);
        // BUG FIX: the original assigned [file.path, uuid], but `uuid` is
        // never defined anywhere — every upload died with a ReferenceError.
        // The caller destructures [uploadPath, filename].
        result = [file.path, filename];
      });
      form.parse(req, (err, fields, files) => {
        // Reject with Error objects so the caller's `error.message` is set
        // (a bare string has no .message property).
        if (err) return reject(new Error("Upload failed."));
        resolve(result);
      });
    } catch (error) {
      reject(new Error("Upload failed."));
    }
  });
}
When you call localhost:8000/media/upload with a POST or PUT request with postman form-data. You can see the uploaded file under the storage folder in the project.
Let me know if something goes wrong with the code
Note: You need to use formidable (For uploading) package to run the example

MulterError: Unexpected field when i need to upload multiple file in nodejs

I want to upload two files in one request in Node.js, and I am using multer for this work.
this is my request in postman :
and I am using multer in the routing:
// NOTE(review): stacking two .single() middlewares is what triggers
// "MulterError: Unexpected field" — the second multer instance sees the
// other field name and rejects it. A single .fields()/.array() middleware
// should handle both files (see the accepted answer further down).
router.post(
"/Create",
UploadProfileHandler.single("signerProfile"),
UploadPosterHandler.single("signerPoster"),
SignerValidation.CreateHandle(),
SignerController.CreateSigner
);
and this is the multer:
signer Profile Multer :
const multer = require("multer");
const fs = require("fs");
const mkdirp = require("mkdirp");
const path = require("path");

// Builds the dated upload directory for signer profile images.
const GetDirectory = () => {
  const now = new Date();
  const year = now.getFullYear();
  // getMonth() is 0-based; add 1 for the calendar month number.
  const month = now.getMonth() + 1;
  // BUG FIX: the original used getDay(), which returns the day of the
  // WEEK (0-6); getDate() returns the day of the month as intended here.
  const day = now.getDate();
  return `src/public/uploads/signer/profile/${year}/${month}/${day}`;
};

const SignerStorage = multer.diskStorage({
  destination: (req, file, cb) => {
    console.log(file,req.body)
    const dir = GetDirectory();
    // Create the directory tree (recursively) before handing it to multer.
    mkdirp(dir).then((made) => {
      cb(null, dir);
    });
  },
  filename: (req, file, cb) => {
    // Keep the client's original file name (the unused local that joined
    // directory + name has been removed).
    cb(null, file.originalname);
  },
});

const UploadSigner = multer({
  storage: SignerStorage,
});

module.exports = UploadSigner;
and this is the signer Poster multer:
const multer = require("multer");
const fs = require("fs");
const mkdirp = require("mkdirp");
const path = require("path");

// Builds the dated upload directory for signer poster images.
const GetDirectory = () => {
  const now = new Date();
  const year = now.getFullYear();
  // getMonth() is 0-based; add 1 for the calendar month number.
  const month = now.getMonth() + 1;
  // BUG FIX: the original used getDay() (day of the WEEK, 0-6);
  // getDate() returns the day of the month as intended here.
  const day = now.getDate();
  return `src/public/uploads/signer/poster/${year}/${month}/${day}`;
};

const SignerStorage = multer.diskStorage({
  destination: (req, file, cb) => {
    const dir = GetDirectory();
    // Create the directory tree (recursively) before handing it to multer.
    mkdirp(dir).then((made) => {
      cb(null, dir);
    });
  },
  filename: (req, file, cb) => {
    // Keep the client's original file name (unused local removed).
    cb(null, file.originalname);
  },
});

const UploadSigner = multer({
  storage: SignerStorage,
});

module.exports = UploadSigner;
But when I want to upload both files at the same time it show me this error :
MulterError: Unexpected field
at wrappedFileFilter (F:\Projects\Nodejs\SalesSignal\node_modules\multer\index.js:40:19)
at Busboy. (F:\Projects\Nodejs\SalesSignal\node_modules\multer\lib\make-middleware.js:114:7)
at Busboy.emit (events.js:315:20)
at Busboy.emit (F:\Projects\Nodejs\SalesSignal\node_modules\busboy\lib\main.js:38:33)
at PartStream. (F:\Projects\Nodejs\SalesSignal\node_modules\busboy\lib\types\multipart.js:213:13)
at PartStream.emit (events.js:315:20)
at HeaderParser. (F:\Projects\Nodejs\SalesSignal\node_modules\dicer\lib\Dicer.js:51:16)
at HeaderParser.emit (events.js:315:20)
at SBMH.emit (events.js:315:20)
at SBMH._sbmh_feed (F:\Projects\Nodejs\SalesSignal\node_modules\streamsearch\lib\sbmh.js:159:14)
at SBMH.push (F:\Projects\Nodejs\SalesSignal\node_modules\streamsearch\lib\sbmh.js:56:14)
at HeaderParser.push (F:\Projects\Nodejs\SalesSignal\node_modules\dicer\lib\HeaderParser.js:46:19)
at Dicer._oninfo (F:\Projects\Nodejs\SalesSignal\node_modules\dicer\lib\Dicer.js:197:25)
at SBMH. (F:\Projects\Nodejs\SalesSignal\node_modules\dicer\lib\Dicer.js:127:10)
What's the problem? How can I solve it?
To handle multiple fields you cannot call the .single() middleware multiple times, instead you should use .array() or .fields(). Here's an example how you would use the latter:
// One .fields() middleware accepts both files in a single request;
// each { name } entry matches one multipart form field.
app.post(
  "/upload",
  upload.fields([{ name: 'signerPoster' }, { name: 'signerProfile' }]),
  (req, res) => {
    // do something with req.files and req.body
  }
);
The uploaded files will be populated under req.files.signerPoster and req.files.signerProfile and req.body will contain the text-based fields.

Overwriting files in Node server

So I have a server that temporarily saved the files in it's memory, before I upload them to the database. Here's my code:
// Streams an uploaded temp file into MongoDB GridFS, then points the
// account's employer.logo at the stored file's id and redirects.
uploadImage(file, uid, res) {
var fs = require('fs');
// NOTE(review): connecting on every call is wasteful — connection setup
// would normally happen once at app startup.
mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
const writeStream = gfs.createWriteStream({
filename: file.filename,
});
fs.createReadStream(file.path).pipe(writeStream);
// 'close' delivers the stored GridFS file object; its _id becomes the
// account's logo reference (the callback's `file` shadows the parameter).
writeStream.on('close', file => {
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},
I am trying to optimize the images before uploading them to the database, like this:
// Attempt: optimize the image with imagemin before streaming it to GridFS.
// NOTE(review): imagemin's second argument must be an output DIRECTORY;
// passing file.path (an existing file) is what produced the EEXIST error
// described below. imagemin also resolves to an ARRAY of result objects,
// so newFile.filename / newFile.path here are undefined.
async uploadImage(file, uid, res) {
const imagemin = require('imagemin');
const imageminJpegtran = require('imagemin-jpegtran');
const imageminPngquant = require('imagemin-pngquant');
console.log(1);
// const newFilePath = `${file.path}optimized`;
const newFile = await imagemin([file.path], file.path, {
plugins: [
imageminJpegtran(),
imageminPngquant({quality: '65-80'})
]
});
// newFile.path = newFilePath;
console.log(2);
console.log(file);
console.log(newFile);
var fs = require('fs');
await mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
const writeStream = gfs.createWriteStream({
filename: newFile.filename,
});
fs.createReadStream(newFile.path).pipe(writeStream);
writeStream.on('close', file => {
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},
But this tells me EEXIST: file already exists, mkdir '/home/alex/Documents/Projects/ontario-job-portal/public/images/logos/b80872b65d18d09bb498abcabe2f3f94', which is true. I'm trying to overwrite the previous image.
How can I make it overwrite the file though?
Creating a new file results in a permission denied error.
Edit. IMPORTANT
Turns out, that the object returned by the imagemin function is a little different from the previous one. Here's my working code:
// Working version: optimize with imagemin (output directory = the input
// file's directory), then stream the optimized copy into GridFS.
async uploadImage(file, uid, res) {
const imagemin = require('imagemin');
const imageminJpegtran = require('imagemin-jpegtran');
const imageminPngquant = require('imagemin-pngquant');
console.log(1);
const path = require("path");
// imagemin requires a DIRECTORY as its destination, not a file path.
const newFilePath = path.dirname(file.path);
const newFile = await imagemin([file.path], path.dirname(file.path), {
plugins: [
imageminJpegtran(),
imageminPngquant({quality: '65-80'})
]
});
// imagemin resolves to an array of results; these two assignments bolt
// extra properties onto that array object.
newFile.path = newFilePath;
newFile.filename = newFile[0].path.replace(/public\/images\/logos\//, '');
console.log(newFile.filename);
var fs = require('fs');
await mongoose.connect(config.db, {useNewUrlParser: true},).catch(e => console.log(e));
var conn = mongoose.connection;
Grid.mongo = mongoose.mongo;
const gfs = Grid(conn.db);
console.log(2);
// NOTE(review): this reads newFile[0].filename, but the filename computed
// above was stored on the array (newFile.filename), not on element 0 —
// presumably this ends up undefined; verify what GridFS records.
const writeStream = gfs.createWriteStream({
filename: newFile[0].filename,
});
console.log(3);
fs.createReadStream(newFile[0].path).pipe(writeStream);
console.log(4);
writeStream.on('close', file => {
console.log(5);
const {_id} = file;
return Account.findByIdAndUpdate(uid, {'employer.logo': _id}).then(() => res.redirect('/employer')).catch(e => console.log(e));
});
},
It looks like you are providing the path of an existing file to imagemin where a directory is required. To extract the directory from the path, use path.dirname(file.path):
const path = require("path");
const newFile = await imagemin([file.path], path.dirname(file.path), {
...
Note that this won't overwrite the existing file. It doesn't appear that imagemin supports that directly. You could do it yourself manually using fs if you wanted, but I'm not sure why you would. It seems like you are interested in using these as temporary files. You may want to add some code to delete the files after they have been written to mongo.

Categories

Resources