I am currently working on a project that takes incoming messages from Amazon's SQS (Simple Queue Service).
I have established the connection to the SQS service and am receiving messages.
The receiving message is in the following format:
{ MessageId: '124a42b-657d-481d-348f-ddd9b8d8b143b',
ReceiptHandle: 'AQEBSSVzlCbqsSUQ3E.....',
MD5OfBody: '7ba46c7c8874fc6d0c4a141a2d3d4d5a721',
Body: '10987235#PUBLISH;aSeIgjS78Iy4KRZHSeAy43...' }
The Message Body is encrypted, and I have been given the password to decrypt it.
My question is how do I decrypt the message Body in nodeJS?
Any examples would be much appreciated!
UPDATE:
Here is a copy of my code so far:
const Consumer = require('sqs-consumer');
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require("path");
const unzipper = require('unzipper');
// SQS consumer: for each message, persist the body to disk, unzip it,
// and (eventually) store the extracted XML in the DB.
const app = Consumer.create({
  queueUrl: 'https://sqs.us-west-1.amazonaws.com/QueueName',
  // Bug fix: with the callback-style sqs-consumer API, the message is
  // only deleted from the queue once `done()` is invoked. The original
  // never called it, so every message reappeared after the visibility
  // timeout and was processed again.
  handleMessage: (message, done) => {
    saveToFile(message.Body)
      .then(() => unzipFile())
      .then((success) => {
        // read content of XML file and save to DB
        console.log('success:', success);
        done(); // acknowledge: message can be deleted from the queue
      })
      .catch((e) => {
        console.log('Error:', e);
        done(e); // signal failure: message stays visible for retry
      });
  },
  sqs: new AWS.SQS()
});
app.on('error', (err) => {
  console.log(err.message);
});
app.start();
// Persists a raw message body to ./zips/decodeMsg.zip, resolving once
// the write completes and rejecting with the fs error on failure.
const saveToFile = (message) => {
  console.log('saveToFile fired!');
  return new Promise((resolve, reject) => {
    // NOTE(review): appendFile ADDS to any existing decodeMsg.zip, so a
    // second message corrupts the archive — fs.writeFile is probably
    // what was intended; confirm before changing.
    fs.appendFile(path.resolve(__dirname) + '/zips/decodeMsg.zip', message, (err) => {
      // Bug fix: the original called reject(error), but the callback
      // parameter is `err` — `error` is undefined, so a failed write
      // threw a ReferenceError instead of rejecting with the real cause.
      // (Also removed a leftover `debugger;` statement.)
      if (err) {
        reject(err);
        return;
      }
      resolve();
    });
  });
};
// Opens the archive written by saveToFile and streams its first entry
// to a file named "readablefile" (expected to be XML). Resolves when
// the write finishes, rejects on a stream error.
// NOTE(review): `password` is a free variable — it is not defined
// anywhere in this file, so this throws a ReferenceError unless it is
// set elsewhere (global/config); confirm its source.
const unzipFile = () => {
return unzipper.Open.file(path.resolve(__dirname) + '/zips/decodeMsg.zip') // Fails To Read The File Here.
.then(function (directory) {
console.log('directory', directory);
// Wrap the pipe so the returned promise settles with the write result.
return new Promise(function (resolve, reject) {
directory.files[0].stream(password) // PASSING IN PASSWORD HERE
.pipe(fs.createWriteStream('readablefile')) // should be XML
.on('error', reject)
.on('finish', resolve)
});
});
}
Update again:
Okay here I am trying to just decrypt the message Body with the following code.
var crypto = require("crypto")
require('./config');
// Decrypts a hex-encoded AES-256-CBC ciphertext using a password.
// NOTE(review): crypto.createDecipher derives key+IV from the password
// via OpenSSL's EVP_BytesToKey; it is deprecated (and removed in recent
// Node releases) in favour of createDecipheriv with an explicit IV.
// Migrating changes the key derivation, so it must match the encryptor.
// NOTE(review): the runtime "TypeError: Bad input string" means `data`
// was not a string in the expected 'hex' encoding — most likely
// process.env.encryptedMessage is undefined; verify the env is loaded.
function decrypt(key, data) {
var decipher = crypto.createDecipher('aes-256-cbc', key);
// 'hex' = encoding of the input ciphertext, 'utf-8' = plaintext output.
var decrypted = decipher.update(data, 'hex', 'utf-8');
decrypted += decipher.final('utf-8');
return decrypted;
}
// NOTE(review): assignment without var/let/const creates an implicit
// global (and throws in strict mode).
decryptedText = decrypt(process.env.password, process.env.encryptedMessage);
console.log("Decrypted Text: " + decryptedText);
However I am getting the following error in the console:
var ret = this._handle.update(data, inputEncoding);
^
TypeError: Bad input string
at TypeError (native)
at Decipher.Cipher.update (crypto.js:145:26)
at decrypt (/Users/dave/Tests/sqs-consumer/app.js:6:34)
Related
I am seeing the following error when trying to extract an image's metadata information with the Sharp module: "Input file contains unsupported image format".
This is only happening for certain signed image urls, particularly ones that contain xmp information in the metadata.
I am hoping someone can help me spot where the issue might be occurring in this code snippet.
Here is the exact code snippet I am using (insert the signed image URL where specified in the doStuff function to test):
const sharp = require("sharp");
const fs = require('fs');
const fetch = require('node-fetch');
// Downloads `sourceUrl` to ./test.png and resolves with the local path
// once every byte has been flushed to disk. Throws on HTTP >= 400.
async function storeUrlToLocal(sourceUrl) {
  const destPath = './';
  const request = {
    method: 'GET',
    encoding: null,
  };
  // Bug fix: `response` was assigned without const/let — an implicit
  // global.
  const response = await fetch(sourceUrl, request);
  if (response.status >= 400) {
    throw new Error(`Failed to fetch data from ${sourceUrl}, status returned = ${response.status}`);
  }
  const localPath = `${destPath}test.png`;
  const fileStream = fs.createWriteStream(localPath);
  return new Promise((resolve, reject) => {
    response.body.pipe(fileStream);
    response.body.on("error", reject);
    fileStream.on("error", reject);
    // Bug fix: resolve on the WRITE stream's 'finish', not the read
    // stream's 'end'. 'end' fires when the download has been consumed,
    // which can be before the bytes are flushed to disk — that is why
    // sharp saw a truncated/invalid file unless a sleep was added.
    fileStream.on("finish", () => {
      console.log(`All the data in the file has been read ${localPath} = ${fs.existsSync(localPath)}`);
      resolve(localPath);
    });
  });
  // Bug fix: removed the trailing .catch() that logged the error and
  // resolved with undefined — callers then handed undefined to sharp,
  // masking the real failure. Rejections now propagate to the caller.
}
// Downloads the image to disk, then extracts its metadata with sharp;
// extraction failures are logged and rethrown.
async function doStuff() {
  const localFilePath = await storeUrlToLocal('<INSERT_IMAGE_URL_HERE>');
  // Read image file and extract metadata
  try {
    const manipulator = sharp(localFilePath, { limitInputPixels: 5000000000 });
    console.log('Manipulator = ', manipulator);
    const imageMetadata = await manipulator.metadata();
    console.log("ImageMetadata = ", imageMetadata);
  } catch (error) {
    console.log(`Image Metadata Extraction Error: ${error.message}`);
    throw error;
  }
}
doStuff();
This code snippet above fails with the "Input file contains unsupported image format" on the line that extracts metadata (imageMetadata = await manipulator.metadata();)
So the strange thing is, I am able to properly extract the metadata (with no errors) with this same code if I add a short sleep after this line: const fileStream = fs.createWriteStream(localPath);
So this code snippet (all I'm doing here is adding a short sleep after fs.createWriteSteam) allows the image metadata to be extracted without issue:
const sharp = require("sharp");
const fs = require('fs');
const fetch = require('node-fetch');
// Downloads `sourceUrl` to ./test.png and resolves with the local path
// once the file is fully flushed. Throws on HTTP >= 400.
async function storeUrlToLocal(sourceUrl) {
  const destPath = './';
  const request = {
    method: 'GET',
    encoding: null,
  };
  // Bug fix: `response` was an implicit global in the original.
  const response = await fetch(sourceUrl, request);
  if (response.status >= 400) {
    throw new Error(`Failed to fetch data from ${sourceUrl}, status returned = ${response.status}`);
  }
  const localPath = `${destPath}test.png`;
  const fileStream = fs.createWriteStream(localPath);
  // Bug fix: the 1-second sleep only "worked" by giving the OS time to
  // flush the file. The deterministic fix is to resolve on the write
  // stream's 'finish' event (all bytes flushed) instead of the read
  // stream's 'end' event (download consumed, not yet written).
  return new Promise((resolve, reject) => {
    response.body.pipe(fileStream);
    response.body.on("error", reject);
    fileStream.on("error", reject);
    fileStream.on("finish", () => {
      console.log('All writes are now complete.');
      resolve(localPath);
    });
  });
  // Removed the swallowing .catch() — rejections now reach the caller
  // instead of resolving with undefined.
}
// Fetches the image locally, then asks sharp for its metadata; any
// extraction error is logged before being propagated.
async function doStuff() {
  const localFilePath = await storeUrlToLocal('<INSERT_IMAGE_URL_HERE>');
  // Read image file and extract metadata
  try {
    const manipulator = sharp(localFilePath, { limitInputPixels: 5000000000 });
    console.log('Manipulator = ', manipulator);
    const imageMetadata = await manipulator.metadata();
    console.log("ImageMetadata = ", imageMetadata);
  } catch (error) {
    console.log(`Image Metadata Extraction Error: ${error.message}`);
    throw error;
  }
}
doStuff();
Why would this Sleep resolve my issues? I don't see any asynchronous calls being run that I would need to be waiting for to complete. Perhaps fs.createWriteStream didn't have enough time to complete its operation? But I do not have the option to await the call to fs.createWriteStream, as it is not async.
In order to encrypt the files I save some of their info to info.json and then encrypt that. That creates a new file named info.dat. I then attempt to delete the original info.json file because it has decryption keys and file names. But, for some reason it doesn't delete. I have a 'vice-versa like' process for decryption where I have to delete info.dat and that seems to work fine. What I think the problem might be (I could be wrong) is that while the info.json file is still being written to, the function to delete it is also being called, so nothing happens. Is this the issue? If not, what is?
const fs = require('fs'), encryptor = require('file-encryptor'), info = require('./info');
const dir = './Encrypted Files/';
// Encrypts every file in `dir` with file-encryptor, records the
// name mapping in info.json, then encrypts info.json itself.
// NOTE(review): encryptor.encryptFile is asynchronous, but the source
// files are deleted and info.json is encrypted synchronously, before
// those callbacks have fired — see the inline notes below.
const encryptFiles = () => {
fs.readdir(dir, (err, files) => {
let key = 'lol';
files.forEach(file => {
const date = new Date();
// Output name pattern: encrypted~M-D-YYYY~<epoch ms>.dat
const name = `encrypted~${date.getMonth() + 1}-${date.getDate()}-${date.getFullYear()}~${date.getTime()}.dat`;
encryptor.encryptFile(dir + file, dir + name, key, (err) => {
if (err) {
throw err
} else {
console.log(`Successfully encrypted '${file}'`);
info.key = key;
info.files.push({
enName: name,
deName: file,
});
// Fire-and-forget: rewrites info.json once per completed file with
// the accumulated list.
fs.writeFile('./info.json', JSON.stringify(info), (err) => {if (err) throw err});
}
});
// NOTE(review): runs right after *starting* the encryption — the
// source file can be deleted while encryptFile is still reading it.
fs.unlinkSync(dir + file);
});
// NOTE(review): runs before any encryptFile callback has pushed
// entries or written info.json, so a stale info.json is encrypted.
infoJSON.encrypt(key);
});
};
// Helpers for managing the plaintext (info.json) / encrypted (info.dat)
// metadata pair.
const infoJSON = {
  // Reset info.json to an empty files/key structure.
  clear: () => {
    const clearInfo = {"files": [], "key": ""}
    fs.writeFile('./info.json', JSON.stringify(clearInfo), (err) => {if (err) throw err});
  },
  // Encrypt info.json into info.dat, then delete the plaintext copy.
  encrypt: (key) => {
    encryptor.encryptFile('./info.json', './info.dat', key, (err) => {
      if (err) {
        throw err
      }
      // Bug fix: the original called fs.unlinkSync('./info.json')
      // immediately after *starting* the async encryption, so the file
      // was removed (or the removal raced) while file-encryptor was
      // still reading it — that is why the delete "didn't work".
      // Deleting inside the completion callback guarantees ordering.
      // (fs.unlinkSync also takes no callback; the one passed in the
      // original was silently ignored.)
      fs.unlinkSync('./info.json');
    })
  },
  // Decrypt info.dat back to info.json, then delete the ciphertext.
  decrypt: (key) => {
    encryptor.decryptFile('./info.dat', './info.json', key, (err) => {
      if (err) {
        throw err;
      }
      // Same fix as encrypt(): only delete after decryption completes.
      fs.unlinkSync('./info.dat');
    });
  }
};
// Decrypts every file in `dir` using the name map held in `info`,
// then clears info.json.
// NOTE(review): infoJSON.decrypt(key) is asynchronous (file-encryptor
// callback), but readdir and the loop below start immediately — the
// `info` module data may be stale when it is consulted; confirm the
// intended ordering.
const decryptFiles = (key) => {
infoJSON.decrypt(key);
fs.readdir(dir, (err, files) => {
files.forEach(file => {
let fileName = '';
// Map the encrypted name back to the original (decrypted) file name.
info.files.forEach(fileInfo => {
(fileInfo.enName == file) && (fileName = fileInfo.deName)
});
encryptor.decryptFile(dir + file, dir + fileName, info.key, (err) => {
(err) ? console.log(err) : console.log(`Successfully decrypted '${file}'`);
});
// NOTE(review): deletes the encrypted source while decryptFile may
// still be reading it — same race as in encryptFiles.
fs.unlinkSync(dir + file);
});
});
// NOTE(review): runs before the async decryption above has finished.
infoJSON.clear();
};
//encryptFiles()
//decryptFiles("lol")
/*
encryption: encrypt files => add encryption info to info.json => encrypt info.json
decryption: decrypt info.json => decrypt files => clear info.json
*/
Here is the index.ts script I am running (based on something I found on reddit):
const path = require("path");
const sql = require("mssql");
const config = require(path.resolve("./config.json"));
let db1;
// Creates the shared mssql connection pool (stored in the module-level
// `db1`); resolves once connected, rejects with the driver error.
const connect = () =>
  new Promise((resolve, reject) => {
    db1 = new sql.ConnectionPool(config.db, (err) => {
      if (!err) {
        console.log("Database pool #1 connected.");
        resolve();
        return;
      }
      console.error("Connection failed.", err);
      reject(err);
    });
  });
// Returns all [Time].ProjectData rows whose [Name] contains `name`
// (bound as a parameter, so no SQL-injection risk).
const selectProjects = async (name) => {
  // Bug fix: the mssql package substitutes inputs declared with
  // request.input() as @param — the original `#name` token was never
  // bound and the query failed / matched nothing. The nested concat
  // calls are also flattened: concat('%', @name, '%') builds the same
  // '%...%' LIKE pattern.
  const query = `
    select * from [Time].ProjectData where [Name] like concat('%', @name, '%')`;
  const request = new sql.Request(db1);
  const result = await request
    .input("name", name)
    .query(query);
  return result.recordset;
};
module.exports = {
connect,
selectProjects
};
connect().then(function() {
console.log(selectProjects('General'));
}).catch(function(err) {
console.log(err);
});
When I run the script using node index (after compiling it of course), I get this in the console:
Database pool #1 connected.
Promise { <pending> }
And then the script hangs.
An async function always returns a promise (await only unwraps promises inside it), so selectProjects hands back a pending promise to the caller; I had to change the last function call to:
// Connect first, then fetch and print the matching projects.
connect()
  .then(() => {
    // Note: the inner promise is deliberately not returned, matching
    // the original flow (its rejection is not routed to the catch).
    selectProjects('General').then((data) => {
      console.log(data);
    });
  })
  .catch((err) => {
    console.log(err);
  });
so I am new to async/await on node.js and I could use some help figuring out this code.
I'm trying to get a file from a ftp server via the 'ftp' package, to write the data into a local 'data.txt' and to open it later in the code. My problem is that I don't understand how to make sure the file is completely written in the 'data.txt' before trying to open it with fs.readFileSync().
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient();
// Downloads a file over FTP into data.txt once the connection is ready;
// the awaited promise resolves when the write stream finishes.
FtpClient.on('ready', async function() {
await new Promise(resolve =>
FtpClient.get('the ftp file directory', (err, stream) => {
if (err) throw err;
stream.once('close', () => {FtpClient.end();});
// Stream written in data.txt
const Streampipe = stream.pipe(fs.createWriteStream('data.txt')).on('finish', resolve)
})
)
})
FtpClient.connect(ConfigFTP);
// NOTE(review): this line runs synchronously, long before the 'ready'
// handler above has downloaded anything — data.txt is read too early
// (or does not exist yet). Move the read inside the handler, after the
// awaited promise resolves (as the answers below do).
var Data = fs.readFileSync('data.txt', 'utf8');
I'm not sure what you want to accomplish, but you can do something like these:
1)
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient()
// Option 1: read data.txt only after the write stream reports
// 'finish', i.e. when every downloaded byte has been flushed to disk.
let writeStream = fs.createWriteStream('data.txt')

FtpClient.on('ready', async () => {
  FtpClient.get('the ftp file directory', (err, ftpStream) => {
    if (err) throw err
    ftpStream.once('close', () => { FtpClient.end() })
    // Stream written in data.txt
    const Streampipe = ftpStream.pipe(writeStream)
  })
})

FtpClient.connect(ConfigFTP)

writeStream.on('finish', () => {
  var Data = fs.readFileSync('data.txt', 'utf8')
})
2)
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient()
// Option 2: await a promise that resolves on the write stream's
// 'finish' event, then read the file inside the same handler.
FtpClient.on('ready', async () => {
  await new Promise((resolve) => {
    FtpClient.get('the ftp file directory', (err, ftpStream) => {
      if (err) throw err
      ftpStream.once('close', () => { FtpClient.end() })
      // Stream written in data.txt
      ftpStream.pipe(fs.createWriteStream('data.txt')).on('finish', resolve)
    })
  })
  var Data = fs.readFileSync('data.txt', 'utf8')
})

FtpClient.connect(ConfigFTP)
I'm building a server in Node that will search a folder to see if an XML file exists (glob), and if it does, read the file in (fs) as a JSON object (xml2js) and eventually store it in a database somewhere. I want to get the results OUT of the parser and into another variable so I can do other things with the data. From what I can tell, something is running asynchronously, but I can't figure out how to wait until it's finished before moving on.
I'm separating my function out into a controller elsewhere from app.js:
app.controller.js
const fs = require('fs-extra');
const glob = require('glob');
const xml2js = require('xml2js');
// Shared holder for the parsed requests; starts empty and is replaced
// once checkFileDrop has parsed a file.
exports.requests = {};
// Scans ./filedrop for request XML, parses it, and stashes the result
// on exports.requests.
// NOTE(review): glob() is called callback-style and does not return a
// promise here, so `await` awaits undefined and this function returns
// immediately — callers resume before anything has been read or parsed.
exports.checkFileDrop = async () => {
console.log('Checking for xml in filedrop...');
// this is the only place await works...
await glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
var parser = new xml2js.Parser();
// this is looking for a specific file now, which I'll address later once I can figure out this issue
fs.readFile('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le', function (err, data) {
if (err) {
console.log('ERROR: ', err);
} else {
parser.parseString(data, (err, result) => {
if (err) {
console.log('ERROR: ', err);
} else {
console.log('data found');
// Replaces the exported object with a JSON string — but only long
// after the caller has already read the empty initial value.
exports.requests = JSON.stringify(result.Records.Record);
// data is outputted here correctly
console.log(exports.requests);
// this doesn't even seem to want to save to exports.requests anyways...
}
});
}
});
});
}
app.js
const appController = require('./controllers/app.controller');
// check if there is file in filedrop
// NOTE(review): checkFileDrop is async but its promise is ignored, so
// the lines below run before any file has been read or parsed.
appController.checkFileDrop();
// prints out an empty object
console.log(appController.requests);
// can't do anything if it doesn't exist yet
// NOTE(review): saveToDB is not defined in app.controller.js as shown.
appController.saveToDB(appController.requests);
await will wait for a Promise value to resolve, otherwise it'll just wrap the value it is given in a promise and resolve the promise right away. In your example,
await glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
the call to glob does not return a Promise, so the await is essentially useless. So you need to create the promise yourself.
exports.checkFileDrop = async () => {
console.log('Checking for xml in filedrop...');
const files = await new Promise((resolve, reject) => glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
if (err) reject(err);
else resolve(files);
});
const parser = new xml2js.Parser();
const data = await new Promise((resolve, reject) => fs.readFile('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le', function (err, data) {
if (err) reject(err);
else resolve(data);
});
const result = await new Promise((resolve, reject) => parser.parseString(data, (err, result) => {
if (err) reject(err);
else resolve(result);
});
console.log('data found');
const requests = JSON.stringify(result.Records.Record);
console.log(requests);
}
Note that now this function will reject the promise it returns instead of force-logging the error.
You can also condense this down with a helper. Node 8 for instance includes util.promisify to make code like this easier to write, e.g.
const util = require('util');
exports.checkFileDrop = async () => {
console.log('Checking for xml in filedrop...');
const files = await util.promisify(glob)('./filedrop/ALLREQUESTS-*.xml');
const parser = new xml2js.Parser();
const data = await util.promisify(fs.readFile)('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le');
const result = await util.promisify(parser.parseString.bind(parser))(data);
console.log('data found');
const requests = JSON.stringify(result.Records.Record);
console.log(requests);
}
You can use async/await
import fs from 'fs';
import { promisify } from 'util';
// Reads an XML file and returns its merchandiser product records as a
// JSON string, or undefined if reading/parsing fails (failures are
// logged, preserving the original best-effort contract).
// NOTE(review): xml2js is referenced but not imported in this snippet —
// confirm the real module adds `import xml2js from 'xml2js'`.
const xmlToJson = async filePath => {
  const parser = new xml2js.Parser();
  try {
    const data = await fs.promises.readFile(filePath, 'utf8');
    // Bug fix: promisify(parser.parseString) detaches the method from
    // its Parser instance, so `this.reset()`/parser state inside xml2js
    // fails at call time — bind the method before promisifying (as the
    // util.promisify example above already does).
    const result = await promisify(parser.parseString.bind(parser))(data);
    const requests = JSON.stringify(result.merchandiser.product);
    return requests;
  }
  catch (err) {
    console.log(err);
  }
}