In order to encrypt the files I save some of their info to info.json and then encrypt that. That creates a new file named info.dat. I then attempt to delete the original info.json file because it has decryption keys and file names. But, for some reason it doesn't delete. I have a 'vice-versa like' process for decryption where I have to delte info.dat and that seems to work fine. What I think the problem might be (I could be wrong) is when the info.json file is getting info written to it, the function to delete it is also being called, causing nothing to happen. Is this the issue? If not what is?
const fs = require('fs'), encryptor = require('file-encryptor'), info = require('./info');
const dir = './Encrypted Files/';
const encryptFiles = () => {
fs.readdir(dir, (err, files) => {
let key = 'lol';
files.forEach(file => {
const date = new Date();
const name = `encrypted~${date.getMonth() + 1}-${date.getDate()}-${date.getFullYear()}~${date.getTime()}.dat`;
encryptor.encryptFile(dir + file, dir + name, key, (err) => {
if (err) {
throw err
} else {
console.log(`Successfully encrypted '${file}'`);
info.key = key;
info.files.push({
enName: name,
deName: file,
});
fs.writeFile('./info.json', JSON.stringify(info), (err) => {if (err) throw err});
}
});
fs.unlinkSync(dir + file);
});
infoJSON.encrypt(key);
});
};
const infoJSON = {
clear: () => {
const clearInfo = {"files": [], "key": ""}
fs.writeFile('./info.json', JSON.stringify(clearInfo), (err) => {if (err) throw err});
},
encrypt: (key) => {
encryptor.encryptFile('./info.json', './info.dat', key, (err) => {
if (err) {
throw err
}
})
fs.unlinkSync('./info.json', (err) => {if (err) throw err}); //THIS Doesn't work!!!
},
decrypt: (key) => {
encryptor.decryptFile('./info.dat', './info.json', key, (err) => {
if (err) {
throw err;
}
});
fs.unlinkSync('./info.dat');
}
};
const decryptFiles = (key) => {
infoJSON.decrypt(key);
fs.readdir(dir, (err, files) => {
files.forEach(file => {
let fileName = '';
info.files.forEach(fileInfo => {
(fileInfo.enName == file) && (fileName = fileInfo.deName)
});
encryptor.decryptFile(dir + file, dir + fileName, info.key, (err) => {
(err) ? console.log(err) : console.log(`Successfully decrypted '${file}'`);
});
fs.unlinkSync(dir + file);
});
});
infoJSON.clear();
};
//encryptFiles()
//decryptFiles("lol")
/*
encryption: encrypt files => add encryption info to info.json => encrypt info.json
decryption: decrypt info.json => decrypt files => clear info.json
*/
Related
How can I build an ETL pipeline script to un-gunzip, extract, transform, save and gunzip files? I am able to get up to un-gunzip, but I am unable to extract, transform, save, and gunzip. I was attempting to follow this tutorial to get me started: https://www.mariokandut.com/transform-data-etl-pipeline-node-js/ One thing that I'm stuck on is how to loop through files after each sequential step. I get an unexpected error SyntaxError: Unexpected end of JSON input during the extract step.
I was able to extract, transform, and save in a separate example. Although I am unable to successfully combine it into this ETL pipeline script.
const fs = require('fs');
const {promises: {readdir, readFile, writeFile}} = require("fs");
var url = require('url');
const zlib = require('zlib');
const input_dir = __dirname + '/input'
const input_unzipped_dir = __dirname + '/input-unzipped'
const output_dir = __dirname + '/output'
async function get_files(dir) {
return await readdir(dir).then(response =>
response
);
}
function read_file(file_path, callback) {
fs.readFile(file_path, 'utf-8', (err, file_data) => {
if (err) {
return callback && callback(err);
}
try {
const object = JSON.parse(file_data);
return callback && callback(null, object);
} catch (err) {
return callback && callback(err);
}
})
}
function transform_JSON(file_data) {
console.log("ts is:", file_data.ts); // => "timestamp"
console.log("u is:", file_data.u); // => "url"
console.log("e is:", file_data.e); // => "event"
console.log(url.parse(file_data.u))
u = url.parse(file_data.u)
const query_map = new Map(Object.entries(file_data.e));
const output = {
timestamp: file_data.ts,
url_object: {
domain: u.host,
path: u.path,
query_object: query_map,
hash: u.hash,
},
ec: file_data.e,
}
const jsonString = JSON.stringify(output)
console.log(jsonString)
return jsonString
}
const orchestrate_etl_pipeline = async () => {
try {
// extract
files = await get_files(input_dir);
console.log(files);
if (!fs.existsSync(input_unzipped_dir)){
fs.mkdirSync(input_unzipped_dir);
}
Promise.all(files.map(filename => {
if (filename.endsWith('.gz')) {
return new Promise((resolve, reject) => {
const fileContents = fs.createReadStream(`${input_dir}/${filename}`);
const writeStream = fs.createWriteStream(`${input_unzipped_dir}/${filename.slice(0, -3)}`);
const unzip = zlib.createGunzip();
fileContents.pipe(unzip).pipe(writeStream).on('finish', (err) => {
if (err) return reject(err);
else resolve();
})
})
}
}))
.then(
console.log('unzip done')
);
// transform
files_unzipped = await get_files(input_unzipped_dir);
Promise.all(files_unzipped.map(filename => {
if (filename.endsWith('.json')) {
read_file(`${input_unzipped_dir}/${filename}`, (err, file_data) => {
if (err) {
console.error(err);
return
}
transform_JSON = transform_JSON(file_data)
console.log(transform_JSON)
})
}
}))
.then(
console.log('transform done')
);
// save file
// gunzip file
} catch (error) {
console.log(error);
}
}
orchestrate_etl_pipeline().then(console.log('etl done'));
Separate transform and save file example:
function jsonReader(file_path, callback) {
fs.readFile(file_path, (err, file_data) => {
if (err) {
return callback && callback(err);
}
try {
const object = JSON.parse(file_data);
return callback && callback(null, object);
} catch (err) {
return callback && callback(err);
}
});
}
jsonReader(`${input_zipped_dir}/t1669976028340.json`, (err, input) => {
if (err) {
console.log(err);
return;
}
console.log("ts is:", input.ts); // => "ts"
console.log("u is:", input.u); // => "u"
console.log("e is:", input.e); // => "e"
console.log(url.parse(input.u))
u = url.parse(input.u)
const query_map = new Map(Object.entries(input.e));
const output = {
timestamp: input.ts,
url_object: {
domain: u.host,
path: u.path,
query_object: query_map,
hash: u.hash,
},
ec: input.e,
}
jsonString = JSON.stringify(output)
console.log(jsonString)
fs.writeFile(`${input_zipped_dir}/t1669976028340.json`, jsonString, err => {
if (err) {
console.log('Error writing file', err)
} else {
console.log('Successfully wrote file')
}
})
})
I'm trying to develop a simple app that if you pass a parameter in command line the application will search inside a directory and if the text match in some of the files the file should be save in a list, but when I put the console.log the value is not updated
here is my code:
const folder = "./movies/data";
const fs = require("fs");
var args = process.argv.slice(2);
console.log("myArgs: ", args);
var count = 0;
var list = [];
fs.readdir(folder, (err, files) => {
files.forEach((file) => {
fs.readFile(`movies/data/${file}`, "utf8", function (err, data) {
if (err) console.log(err);
if (data.includes("walt disney")) {
count++;
list.push(data);
console.log("Found in: ", data);
}
});
});
console.log(`Foram encontradas ${count} ocorrĂȘncias pelo termo ${args}.`);
});
any suggestions about what i'm doing wrong?
For your program to work, you will have to add some Promise / async/await logic. On the moment you try to read from the files, the files are still undefined so the fs.readDir() function will not provide the wanted result.
This should work:
const { resolve } = require('path');
const { readdir } = require('fs').promises;
const fs = require("fs");
var args = process.argv.slice(2);
const pathToFiles = "./movies/";
async function getFiles(dir) {
const dirents = await readdir(dir, { withFileTypes: true });
const files = await Promise.all(dirents.map((dirent) => {
const res = resolve(dir, dirent.name);
return dirent.isDirectory() ? getFiles(res) : res;
}));
return Array.prototype.concat(...files);
}
getFiles(pathToFiles)
.then(files => {
console.log(files)
files.forEach((file) => {
fs.readFile(file, 'utf8', (err, data) => {
if (err) console.log(err);
if (data.includes(args)) {
console.log(`${args} found in ${file}.`);
} else {
console.log(`${args} not found.`);
}
});
})
})
.catch (e => console.error(e));
I have this code that serves every markdown file in the './markdown' folder. At '/api/markdown/filename'.
var apiRouter = express.Router();
markdownFolder = './markdown/';
apiRouter.get('/:markdown_file_noext', function(req, res) {
fs.readdir(markdownFolder, function(err, markdown) {
if (err) throw err;
markdown.forEach(function(file) {
fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
if (err) throw err;
fileNoExtension = file.slice(0, file.indexOf('.'));
if (req.params.markdown_file_noext == fileNoExtension) {
res.json({
'title': fileNoExtension,
'markdown': marked(file_content)
});
};
});
});
});
});
But i end having a ton of callbacks do the the nature of the 'fs' methods. How do i avoid this?
Using Q as promise library:
const Q = require('q');
const fs = require('fs');
const markdownFolder = './markdown/';
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);
readdir(markdownFolder).then(markdown => {
const promises = [];
markdown.forEach(file => promises.push(readFile(markdownFolder + file, 'utf8')));
return Q.all(promises);
}).then(files => {
// Do your magic.
}).catch(error => {
// Do something with error.
});
You have different option.
Use named Function instead of anonymus functinos. It would make it a little bit more readable but you will still be using callbacks.
Use Promises, but you will need to use bluebird to wrap the fs module.
For a more advance option, you can use generators and Promises to make your code look more like a sync way. Take a look at co or bluebird.coroutine.
With Promises you could do like this:
const path = require('path');
var apiRouter = express.Router();
markdownFolder = './markdown/';
apiRouter.get('/:markdown_file_noext', function(req, res) {
readdir(markdownFolder)
.then((files) => {
const tasks = files.map((file) => {
const filePath = path.resolve(markdownFolder, file);
return readFile(filePath);
});
return Promise.all(tasks); // Read all files
})
.then((fileContents) => {
return fileContents.map((content) => {
fileNoExtension = file.slice(0, file.indexOf('.'));
if (req.params.markdown_file_noext == fileNoExtension) {
return {
'title': fileNoExtension,
'markdown': marked(content)
};
};
})
})
.then((results) => {
// It's better if you aggregate all results in one array and return it,
// instead of calling res.json for each result
res.json(results);
})
.catch((err) => {
// All errors are catched here
console.log(err);
})
});
function readdir(folderPath) {
return new Promise((resolve, reject) => {
fs.readdir(folderPath, (err, files) {
if (err) {
return reject(err);
}
resolve(files);
});
});
}
function readFile(filePath) {
return new Promise((resolve, reject) => {
fs.readFile(filePath, 'utf8', (err, file_content) => {
if (err) {
return reject(err);
}
resolve(file_content);
});
});
}
I am currently working on a project that takes incoming messages from Amazons SQS (Simple Queue Service).
I have established the connection to the SQS service and receiving messages.
The receiving message is in the following format:
{ MessageId: '124a42b-657d-481d-348f-ddd9b8d8b143b',
ReceiptHandle: 'AQEBSSVzlCbqsSUQ3E.....',
MD5OfBody: '7ba46c7c8874fc6d0c4a141a2d3d4d5a721',
Body: '10987235#PUBLISH;aSeIgjS78Iy4KRZHSeAy43...' }
The Message Body is encrypted, I been given the password to decrypt it.
My question is how do I decrypt the message Body in nodeJS?
Any examples would be much appreciated!
UPDATE:
Here is a copy of my code so far:
const Consumer = require('sqs-consumer');
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require("path");
const unzipper = require('unzipper');
const app = Consumer.create({
queueUrl: 'https://sqs.us-west-1.amazonaws.com/QueueName',
handleMessage: (message, done) => {
saveToFile(message.Body)
.then(() => {
return unzipFile();
})
.then((success) => {
// read content of XML file and save to DB
console.log('success:', success);
})
.catch(e => {
console.log('Error:', e);
});
},
sqs: new AWS.SQS()
});
app.on('error', (err) => {
console.log(err.message);
});
app.start();
const saveToFile = (message) => {
debugger;
console.log('saveToFile fired!');
return new Promise((resolve, reject) => {
fs.appendFile(path.resolve(__dirname) + '/zips/decodeMsg.zip', message, (err) => {
if (err) reject(error);
resolve();
});
});
}
const unzipFile = () => {
return unzipper.Open.file(path.resolve(__dirname) + '/zips/decodeMsg.zip') // Fails To Read The File Here.
.then(function (directory) {
console.log('directory', directory);
return new Promise(function (resolve, reject) {
directory.files[0].stream(password) // PASSING IN PASSWORD HERE
.pipe(fs.createWriteStream('readablefile')) // should be XML
.on('error', reject)
.on('finish', resolve)
});
});
}
Update again:
Okay here I am trying to just decrypt the message Body with the following code.
var crypto = require("crypto")
require('./config');
function decrypt(key, data) {
var decipher = crypto.createDecipher('aes-256-cbc', key);
var decrypted = decipher.update(data, 'hex', 'utf-8');
decrypted += decipher.final('utf-8');
return decrypted;
}
decryptedText = decrypt(process.env.password, process.env.encryptedMessage);
console.log("Decrypted Text: " + decryptedText);
However I am getting the following error in the console:
var ret = this._handle.update(data, inputEncoding);
^
TypeError: Bad input string
at TypeError (native)
at Decipher.Cipher.update (crypto.js:145:26)
at decrypt (/Users/dave/Tests/sqs-consumer/app.js:6:34)
I am using Node.js file system to build an array of file paths. I would like to know when all files have been read, so I could work further with my array.
Sequence of events:
Go into a folder
Get a path of each file
Put each path into an array
Let me know once you're done
Code:
'use strict';
const fs = require('fs');
function readDirectory(path) {
return new Promise((resolve, reject) => {
const files = [];
fs.readdir(path, (err, contents) => {
if (err) {
reject(err);
}
contents.forEach((file) => {
const pathname = `${ path }/${ file }`;
getFilesFromPath(pathname).then(() => {
console.log('pathname', pathname);
files.push(pathname);
});
resolve(files);
});
});
});
}
function getFilesFromPath(path) {
return new Promise((resolve, reject) => {
const stat = fs.statSync(path);
if (stat.isFile()) {
fs.readFile(path, 'utf8', (err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
} else if (stat.isDirectory()) {
readDirectory(path);
}
});
}
getFilesFromPath('./dist');
Would be great to glue with:
Promise.all(files).then(() => {
// do stuff
})
Your suggestion pretty much works - did you try it? Here's a typical way of doing it:
getFilesFromPath( path ).then( files => {
const filePromises = files.map( readFile );
return Promises.all( filePromises );
}).then( fileContentsArray => {
//do stuff - the array will contain the contents of each file
});
You'll have to write the "readFile()" function yourself, but looks like you got that covered.