What I'm trying to do:
Get some Azure Storage blobs from the container DIDE, encrypt them with RSA 2048, and upload them to another container called encrypted-dide.
These blobs are downloaded through a stream (here Microsoft did a good job: https://learn.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-nodejs#upload-blobs-to-a-container) and recomposed by the function streamToString(readableStream).
(I'm not using OpenPGP.js streams, as I don't know if Microsoft streams are the same as Node.js ones.)
My code works as expected with unencrypted text and uploads blobs to the supposedly encrypted container encrypted-dide.
I have followed the official documentation of OpenPGP.js and some Internet resources.
The error I am getting is: Error: Parameter [message] needs to be of type Message, in OpenPGP.js.
The publicKey is hardcoded in the file keys.js and is exported like this:
const publicKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
xsBNBGDgi3gBCADcZqIcczPDAx3+os5cCFVgaoT62Y+5rRvwPGPaPKKA1ajw
7NvoxS0RsJbqYAwNk0IEneoZvgSPpqkehGQCOBdsEhjcEgxVxaSlbbgPJkPh
avjTBDUhr6pIUc+MkSX7eh5LdkgWRSfzZdLXX2es5ycM5R1ZryzPTAenZh7D
l1g1x9TsyX+scI7gAtuyfbzAybYVqYMIvcHYZdIi8m6pGmxIqb0QW6sgO6nG
GyjVgxLDyMnHzYMInFRmKUV8XUUw9ECLZ6ioW4rthmpjoswh9vmP6vWI9OL/
Y7Zb3xY5XnIT6UFSpAHS5V/TNbEUD/EpoNtEI30bUl2X35UM277fUxetABEB
AAHNG0pvbiBTbWl0aCA8am9uQGV4YW1wbGUuY29tPsLAigQQAQgAHQUCYOCL
eAQLCQcIAxUICgQWAAIBAhkBAhsDAh4BACEJEGHAYnRSOf5GFiEExGMJvxnV
v1dXecI0YcBidFI5/kY5PAgAxL10QcUZIrxRXQIrqk04ZDhO4ehMirPqH/KT
L/MeHppHFqV06Fm4JDAOpGyh8lgleLwP4P9Lrod3AVSOKSX48u+UM/Bo4LtG
foAntS+tC9RjWlpR6PZ0aJA8SqHKLCnkaUvz7wv/M55fgGxeeQbhOLutNxN4
L8rCNhPo3UbWwoB+ifgQ9S4bv4kgyJxXYinyGYG0CD67YxQKxiAt58qjsdmK
x4uKCoFbHd1Oa4wfr6ezXet+2hCQvsf8eJV88+qL7TmpSe3ypiTWHNgxymNx
v77SlOkkzayJVWxrWtFU8ZoatlsfOP3A5tToio2rEhCHcnqYl4KtF1a0WUR8
KG+pJc7ATQRg4It4AQgA0Q2uZL9TIqGWtNzeAygdG0C3o+D+MoEYI/Qx0A6X
aZv7/1v84V++lRD0iuIMUlBgFEJWMsHF7cN1EMlUV1lRxOzyKTv+0FqyoSTr
bWexA+jG/Nb3Q8vSX1+eVHvm1+2H7AGhBH2szVmXeH15bGNaOaw03EmG5pCh
CIaCoXYUXKoavsa+C8827lGSuqLs1uRniCmIjQvkQSZg7a0IH6dpMIpxdHPh
h9Zyt8e74WwfmXW/be6cjWRI9FgBzl9U5EQEEVO1JdLvfzEEXkNthyAAhl+P
Z1oTR2PSs4ZUlYdb3MQrt7XoKeEOqCHHxoHB3gsj+75Jnc/aAbM+hb13imAJ
iwARAQABwsB2BBgBCAAJBQJg4It4AhsMACEJEGHAYnRSOf5GFiEExGMJvxnV
v1dXecI0YcBidFI5/kZYSQgAop0OsPV11O/fzbZ+oEabC3Ye9hNGapJQNdmJ
MJkiJg7Hnl1FO4MDtHK5OJ4YePFAqtlKRDIBCALPiN0E2v9+3yAafs6TQDc9
Lg3iIPDOnrXv7J7pv2WPnnue4o8Gkggpa+wEjbQJcUBLX311xJGBG4pSNIVN
FJcsl1fGoaxXB5ANPy/+UNMv0l/7cQWDzSw8V9WH10SO2Q4dQF7Zxw+UgBdb
mRVXWNHkcTs81WA/hYtAuLw0O5Q1QWfbXzlTJGNPy/lMMsxLF6La8fBPHlE0
CjYd4ZH9HgOvpCACjRtbc1jywaZJEisO2aJcO2BaozSzYUmkr5sH2wjSKcMS
nLviCw==
=Wg0i
-----END PGP PUBLIC KEY BLOCK-----`
The code is:
const { BlobServiceClient } = require('@azure/storage-blob');
// const { v1: uuidv1 } = require('uuid');
// const stream = require('stream').promises
const openpgp = require('openpgp');
// import * as openpgp from 'openpgp'
const { publicKey } = require('./keys')
async function main() {
  const AZURE_STORAGE_CONNECTION_STRING = process.env.AZURE_STORAGE_CONNECTION_STRING;
  const blobServiceClient = BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
  const containerClient = blobServiceClient.getContainerClient("uploadebs");
  const containerEncryptedFiles = blobServiceClient.getContainerClient("encrypted-dide");
  await containerEncryptedFiles.createIfNotExists("encrypted-dide")
  // console.log(await openpgp.readKey({ armoredKey: publicKey })) <- THIS WORKS!
  for await (const blob of containerClient.listBlobsFlat()) {
    if (blob.name.match('^DIDE*')) {
      const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
      const encryptedblockBlobClient = containerEncryptedFiles.getBlockBlobClient(blob.name)
      blockBlobClient.download(0)
        .then(downloadBlockBlobResponse => streamToString(downloadBlockBlobResponse.readableStreamBody))
        .then(blobAsString => openpgp.encrypt({
          message: openpgp.createMessage({ text: blobAsString }), // input as Message object
          publicKeys: openpgp.readKey({ armoredKey: publicKey }),
        }))
        // BELOW LINE, SENDS TEXT IN BLOBS, ENCRYPTED OR NOT THROUGH FUNC UPLOAD
        .then(encrypted => { encryptedblockBlobClient.upload(encrypted, encrypted.length) })
    }
  }
}
async function streamToString(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on("data", (data) => {
      chunks.push(data.toString());
    });
    readableStream.on("end", () => {
      resolve(chunks.join(""));
    });
    readableStream.on("error", reject);
  });
}
main().then(() => console.log('Done')).catch((ex) => console.log(ex.message));
openpgp.createMessage returns a Promise, so you need to call .then on it or add await before it.
The same goes for openpgp.readKey; it is also a Promise.
For example, from the docs:
const publicKey = await openpgp.readKey({ armoredKey: publicKeyArmored });
const encrypted = await openpgp.encrypt({
  message: await openpgp.createMessage({ text: 'Hello, World!' }), // input as Message object
  publicKeys: publicKey, // for encryption
  privateKeys: privateKey // for signing (optional)
});
EDIT2:
Without using await.
.then(blobAsString => {
  return Promise.all([openpgp.createMessage({ text: blobAsString }), openpgp.readKey({ armoredKey: publicKey })])
    .then(([message, publicKeys]) => {
      return openpgp.encrypt({
        message,
        publicKeys,
      });
    });
})
Used like this:
.then(blobAsString => {
  return Promise.all([openpgp.createMessage({ text: blobAsString }), openpgp.readKey({ armoredKey: publicKey })])
    .then(([message, publicKeys]) => {
      return openpgp.encrypt({
        message,
        publicKeys,
      })
    })
    .then(encrypted => { encryptedblockBlobClient.upload(encrypted, encrypted.length) });
})
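Applied to the original loop, the fully awaited version would look like this (a sketch reusing the question's variables; error handling omitted):
for await (const blob of containerClient.listBlobsFlat()) {
  if (blob.name.match('^DIDE*')) {
    const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
    const encryptedblockBlobClient = containerEncryptedFiles.getBlockBlobClient(blob.name);
    const downloadBlockBlobResponse = await blockBlobClient.download(0);
    const blobAsString = await streamToString(downloadBlockBlobResponse.readableStreamBody);
    const encrypted = await openpgp.encrypt({
      message: await openpgp.createMessage({ text: blobAsString }), // both createMessage...
      publicKeys: await openpgp.readKey({ armoredKey: publicKey }), // ...and readKey are awaited
    });
    await encryptedblockBlobClient.upload(encrypted, encrypted.length);
  }
}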
Related
Does it matter from where I am calling the API? For example, the region of my KMS is the US, but currently I'm calling it from the EU. Does it somehow affect decryption? I'm encrypting data fine, but decryption gives random output. Other than region, I'm not sure what else could be causing the issue; please have a look, any answer is appreciated.
Here is my code for reference:
import { DecryptCommandInput, KMS } from "@aws-sdk/client-kms";
import util from 'util'
import { kmsConfig } from "./constants";

export const region = kmsConfig.region;
export const kms = new KMS({
  region: region,
  apiVersion: "2014-11-01",
  credentials: {
    accessKeyId: kmsConfig.accesKeyId,
    secretAccessKey: kmsConfig.secretAccessKey,
  },
  // important for react-native
  endpoint: {
    hostname: "kms." + region + ".amazonaws.com",
    path: "",
    protocol: "https",
  }
});
export async function kmsEncryption(data) {
  // a client can be shared by different commands.
  try {
    let encryptionParams = {
      KeyId: kmsConfig.arn,
      Plaintext: data,
    };
    let kmsEncrypt = util.promisify(kms.encrypt).bind(kms);
    let encryptedData = await kmsEncrypt(encryptionParams);
    // encryptedData contains 2 parts, CiphertextBlob and KeyId
    console.log("Encrypted");
    return encryptedData;
  } catch (error) {
    console.log("\nerror => \n", error);
  }
}
export const kmsDecryption = async (encryptedData: any) => {
  try {
    let buff = Buffer.from(encryptedData.CiphertextBlob);
    let encryptedBase64data = buff.toString("base64");
    console.log("\nencryptedBase64data => \n", encryptedBase64data);
    let decryptionParams: DecryptCommandInput = {
      CiphertextBlob: encryptedData.CiphertextBlob,
    };
    let kmsDecrypt = util.promisify(kms.decrypt).bind(kms);
    let decryptedData = await kmsDecrypt(decryptionParams);
    // decryptedData contains 2 parts, Plaintext and KeyId
    console.log("\ndecryptedData => \n", decryptedData);
    console.log("\ndecryptedData.Plaintext => \n", decryptedData.Plaintext);
    console.log("\ndecryptedData.KeyId => \n", decryptedData.KeyId);
    let buff2 = Buffer.from(decryptedData.Plaintext, "base64");
    let originalText = buff2.toString();
    console.log("\noriginalText => \n", originalText);
    return originalText;
  } catch (error) {
    console.log("\ndecrypt error => \n", error);
  }
}
let encode = await kmsEncryption("helloword1234");
let decode = await kmsDecryption(encode);
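Side note: in the v3 SDK (@aws-sdk/client-kms) the methods of the aggregated KMS client already return promises, so the util.promisify calls above should not be necessary. A minimal sketch (Plaintext passed as bytes, which is what the v3 blob type expects):
// @aws-sdk/client-kms v3: encrypt/decrypt return promises directly
const encryptedData = await kms.encrypt({ KeyId: kmsConfig.arn, Plaintext: Buffer.from(data) });
const decryptedData = await kms.decrypt({ CiphertextBlob: encryptedData.CiphertextBlob });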
I am trying to open a file from an S3 bucket as a PDF, using Angular. To do this, I have a Node service running which gets the object, which I call from Angular; I then try to open the result in Angular as a PDF. Is there something I am missing? When I open the PDF, it shows up as a blank (white) document.
Below is my Node code:
const streamToString = (stream) =>
  new Promise((resolve, reject) => {
    const chunks = [];
    stream.on("data", (chunk) => chunks.push(chunk));
    stream.on("error", reject);
    stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf8")));
  });

const readFile = async function getObj(key) {
  const params = {
    Bucket: vBucket,
    Key: key,
  };
  const command = new GetObjectCommand(params);
  const response = await client.send(command);
  const { Body } = response;
  return streamToString(Body);
};
And here I am consuming it in Angular and opening it as a PDF.
The service:
getObj(key: String): Observable<any> {
  const httpOptions = {
    'responseType': 'arraybuffer' as 'json'
    //'responseType' : 'blob' as 'json' //This also worked
  };
  return this.http.get<any>(environment.s3Ep + '/getfile?key=' + key, httpOptions);
}
And the code consuming the service:
this.s3Svc.getObj(key).subscribe(
  res => {
    let file = new Blob([res], { type: 'application/pdf' });
    var fileURL = URL.createObjectURL(file);
    window.open(fileURL);
  }
);
I started experiencing the same issue. I found a solution: replacing streamToString with streamToBuffer, as follows:
const streamToBuffer = async (stream: Readable): Promise<Buffer> => {
  return new Promise((resolve, reject) => {
    const chunks: Array<any> = []
    stream.on('data', (chunk) => chunks.push(chunk))
    stream.on('error', reject)
    stream.on('end', () => resolve(Buffer.concat(chunks)))
  })
}
and the code that consumes it:
const command = new GetObjectCommand({ Bucket, Key })
const data = await s3.send(command)
const content = await streamToBuffer(data.Body as Readable)
fs.writeFileSync(destPath, content)
In my case I'm writing to a PDF file.
Writing the string retrieved from streamToString, or writing buffer.toString(), resulted in a blank PDF.
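The underlying reason is that a PDF is binary data: decoding its bytes as UTF-8 and re-encoding them is lossy, so the string round-trip corrupts the file. The same applies when the object is served over HTTP, as in the Angular setup above: the handler should send the Buffer untouched. A sketch, assuming an Express endpoint and a readFile variant rewritten on top of streamToBuffer:
app.get('/getfile', async (req, res) => {
  const content = await readFile(req.query.key); // must resolve to a Buffer, not a string
  res.set('Content-Type', 'application/pdf');
  res.send(content); // Express writes a Buffer as raw binary
});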
I'm trying to stream JSON from MongoDB to S3 with the new version of @aws-sdk/lib-storage:
"@aws-sdk/client-s3": "^3.17.0"
"@aws-sdk/lib-storage": "^3.34.0"
"JSONStream": "^1.3.5",
Try #1: It seems that I'm not using JSONStream.stringify() correctly:
import JSONStream from 'JSONStream';
import { MongoClient } from 'mongodb';
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';

const s3Client = new S3Client({ region: env.AWS_REGION });

export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
  let client;
  try {
    client = await MongoClient.connect(connectionString);
    const db = client.db();
    const readStream = db.collection(collectionName).find('{}').limit(5).stream();
    readStream.pipe(JSONStream.stringify());
    const upload = new Upload({
      client: s3Client,
      params: {
        Bucket: 'test-bucket',
        Key: 'extracted-data/benda_mongo.json',
        Body: readStream,
      },
    });
    await upload.done();
  }
  catch (err) {
    log.error(err);
    throw err.name;
  }
  finally {
    if (client) {
      client.close();
    }
  }
};
Error #1:
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be one of
type string, Buffer, ArrayBuffer, Array, or Array-like Object.
Received type object
    at Function.from (buffer.js:305:9)
    at getDataReadable (/.../node_modules/@aws-sdk/lib-storage/src/chunks/getDataReadable.ts:6:18)
    at processTicksAndRejections (internal/process/task_queues.js:94:5)
    at Object.getChunkStream (/.../node_modules/@aws-sdk/lib-storage/src/chunks/getChunkStream.ts:17:20)
    at Upload.__doConcurrentUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:121:22)
    at async Promise.all (index 0)
    at Upload.__doMultipartUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:196:5)
    at Upload.done (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:88:12)
Try #2, using the variable jsonStream:
const readStream = db.collection(collectionName).find('{}').limit(5).stream();
const jsonStream = readStream.pipe(JSONStream.stringify());
const upload = new Upload({
  client: s3Client,
  params: {
    Bucket: 'test-bucket',
    Key: 'extracted-data/benda_mongo.json',
    Body: jsonStream,
  },
});
Error #2:
ReferenceError: ReadableStream is not defined
    at Object.getChunk (/.../node_modules/@aws-sdk/lib-storage/src/chunker.ts:22:30)
    at Upload.__doMultipartUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:187:24)
    at Upload.done (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:88:37)
Try #3: use stream.PassThrough:
client = await MongoClient.connect(connectionString);
const db = client.db();
const readStream = db.collection(collectionName).find('{}').limit(5).stream();
readStream.pipe(JSONStream.stringify()).pipe(uploadStreamFile('benda_mongo.json'));
...
const stream = require('stream');

export const uploadStreamFile = async (fileName) => {
  try {
    const pass = new stream.PassThrough();
    const upload = new Upload({
      client: s3Client,
      params: {
        Bucket: 'test-bucket',
        Key: 'extracted-data/benda_mongo.json',
        Body: pass,
      },
    });
    const res = await upload.done();
    log.info('finished uploading file', fileName);
    return res;
  }
  catch (err) {
    return;
  }
};
Error #3:
dest.on is not a function at Stream.pipe (internal/streams/legacy.js:30:8)
Try #4: mongodb.stream({transform: doc => JSON.stringify...}) instead of JSONStream:
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';
import { env } from '../../../env';

const s3Client = new S3Client({ region: env.AWS_REGION });

export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
  let client;
  try {
    client = await MongoClient.connect(connectionString);
    const db = client.db();
    const readStream = db.collection(collectionName)
      .find('{}')
      .limit(5)
      .stream({ transform: doc => JSON.stringify(doc) + '\n' });
    const upload = new Upload({
      client: s3Client,
      params: {
        Bucket: 'test-bucket',
        Key: 'extracted-data/benda_mongo.json',
        Body: readStream,
      },
    });
    await upload.done();
  }
  catch (err) {
    log.error('waaaaa', err);
    throw err.name;
  }
  finally {
    if (client) {
      client.close();
    }
  }
};
Error #4:
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be one of
type string, Buffer, ArrayBuffer, Array, or Array-like Object.
Received type object
    at Function.from (buffer.js:305:9)
    at getDataReadable (/.../node_modules/@aws-sdk/lib-storage/src/chunks/getDataReadable.ts:6:18)
    at processTicksAndRejections (internal/process/task_queues.js:94:5)
    at Object.getChunkStream (/.../node_modules/@aws-sdk/lib-storage/src/chunks/getChunkStream.ts:17:20)
    at Upload.__doConcurrentUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:121:22)
    at async Promise.all (index 0)
    at Upload.__doMultipartUpload (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:196:5)
    at Upload.done (/.../node_modules/@aws-sdk/lib-storage/src/Upload.ts:88:12)
Try #5: using stream.PassThrough() and return pass to pipe:
export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
  let client;
  try {
    client = await MongoClient.connect(connectionString);
    const db = client.db();
    const readStream = db.collection(collectionName).find('{}').limit(5).stream({ transform: doc => JSON.stringify(doc) + '\n' });
    readStream.pipe(uploadStreamFile());
  }
  catch (err) {
    log.error('waaaaa', err);
    throw err.name;
  }
  finally {
    if (client) {
      client.close();
    }
  }
};

const stream = require('stream');

export const uploadStreamFile = async () => {
  try {
    const pass = new stream.PassThrough();
    const upload = new Upload({
      client: s3Client,
      params: {
        Bucket: 'test-bucket',
        Key: 'extracted-data/benda_mongo.json',
        Body: pass,
      },
    });
    await upload.done();
    return pass;
  }
  catch (err) {
    log.error('pawoooooo', err);
    return;
  }
};
Error #5:
TypeError: dest.on is not a function
    at Cursor.pipe (_stream_readable.js:680:8)
After reviewing your error stack traces, the problem probably has to do with the fact that the MongoDB driver provides a cursor in object mode, whereas the Body parameter of Upload requires a traditional stream, suitable in this case for being processed by Buffer.
Taking your original code as reference, you can try providing a Transform stream to deal with both requirements.
Please, consider for instance the following code:
import { Transform } from 'stream';
import { MongoClient } from 'mongodb';
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';

const s3Client = new S3Client({ region: env.AWS_REGION });

export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
  let client;
  try {
    client = await MongoClient.connect(connectionString);
    const db = client.db();
    const readStream = db.collection(collectionName).find('{}').limit(5).stream();
    // We are creating here a Transform to adapt both sides
    const toJSONTransform = new Transform({
      writableObjectMode: true,
      transform(chunk, encoding, callback) {
        this.push(JSON.stringify(chunk) + '\n');
        callback();
      }
    });
    readStream.pipe(toJSONTransform);
    const upload = new Upload({
      client: s3Client,
      params: {
        Bucket: 'test-bucket',
        Key: 'extracted-data/benda_mongo.json',
        Body: toJSONTransform,
      },
    });
    await upload.done();
  }
  catch (err) {
    log.error(err);
    throw err.name;
  }
  finally {
    if (client) {
      client.close();
    }
  }
};
In the code, in toJSONTransform we define the writable part of the stream as object mode; in contrast, the readable part will be suitable for being read by the S3 Upload method... at least, I hope so.
Regarding the second error you reported, the one related to dest.on: I initially thought, and wrote to you about the possibility, that the error occurred because in uploadStreamFile you are returning a Promise, not a stream, and you are passing that Promise to the pipe method, which requires a stream; basically, that you returned the wrong variable. But I hadn't realized that you are passing the PassThrough stream as a param to the Upload method: please be aware that this stream doesn't contain any information, because you never write anything to it. The contents of the readable stream obtained from the MongoDB query are never passed to the callback nor to the Upload itself.
I found an additional solution using stream.PassThrough; note that using JSONStream will stream an array of objects instead of one after the other:
export const uploadMongoStreamToS3 = async (connectionString, collectionName) => {
  let client;
  try {
    client = await MongoClient.connect(connectionString);
    const db = client.db();
    const passThroughStream = new stream.PassThrough();
    const readStream = db.collection(collectionName)
      .find('{}')
      .stream();
    readStream.on('end', () => passThroughStream.end());
    readStream.pipe(JSONStream.stringify()).pipe(passThroughStream);
    await uploadStreamFile('benda_mongo.json', passThroughStream);
  }
  catch (err) {
    log.error(err);
    throw err.name;
  }
  finally {
    if (client) {
      client.close();
    }
  }
};

export const uploadStreamFile = async (fileName, stream) => {
  try {
    log.info('start uploading file', fileName);
    const upload = new Upload({
      client: s3Client,
      params: {
        Bucket: 'test-bucket',
        Key: `${fileName}`,
        Body: stream,
      },
    });
    const res = await upload.done();
    log.info('finished uploading file', fileName);
    return res;
  }
  catch (err) {
    log.error(err);
    return;
  }
};
I want to encrypt some blobs in Azure and I am using the Streams API.
I am reading blobs from one storage container and trying to push them, encrypted, to another container.
Although the new ReadableStream() instance is recognized by my editor, at runtime I get the error "ReadableStream is not defined".
Any help on this is more than welcome!
The code:
const { BlobServiceClient } = require('@azure/storage-blob');
const openpgp = require('openpgp');
const { privateKey, publicKey } = require('./keys')
async function main() {
  const AZURE_STORAGE_CONNECTION_STRING = process.env.AZURE_STORAGE_CONNECTION_STRING;
  const blobServiceClient = BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
  const containerClient = blobServiceClient.getContainerClient("uploadebs");
  const containerEncryptedFiles = blobServiceClient.getContainerClient("encrypted-dide");
  await containerEncryptedFiles.createIfNotExists("encrypted-dide")
  // RSA keys
  // console.log(`private key => ${privateKey}`)
  // console.log(`public key => ${publicKey}`)
  // .then((keyPair) => {console.log(`${JSON.stringify(keyPair)}`)}).catch((error) => console.log(`error in generating the keys: ${error}`));
  for await (const blob of containerClient.listBlobsFlat()) {
    if (blob.name.match('^DIDE*')) {
      const blockBlobClient = containerClient.getBlockBlobClient(blob.name);
      const encryptedblockBlobClient = containerEncryptedFiles.getBlockBlobClient(blob.name)
      const downloadBlockBlobResponse = await blockBlobClient.download(0);
      let blobData = await streamToString(downloadBlockBlobResponse.readableStreamBody)
      const readableStream = new ReadableStream({
        start(controller) {
          controller.enqueue(blobData);
          controller.close();
        }
      });
      const encrypted = await openpgp.encrypt({
        message: await openpgp.createMessage({ text: readableStream }), // input as Message object
        publicKeys: publicKey, // for encryption
        privateKeys: privateKey // for signing (optional)
      });
      console.log(encrypted);
      await encryptedblockBlobClient.upload(encrypted, encrypted.length) // the blob should already be encrypted
    }
  }
}
async function streamToString(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on("data", (data) => {
      chunks.push(data.toString());
    });
    readableStream.on("end", () => {
      resolve(chunks.join(""));
    });
    readableStream.on("error", reject);
  });
}
main().then(() => console.log('Done')).catch((ex) => console.log(ex.message));
package.json:
{
  ...
  "dependencies": {
    "@azure/storage-blob": "^12.0.0",
    "@types/dotenv": "^4.0.3",
    "dotenv": "^6.0.0",
    "node": "^16.4.0",
    "openpgp": "^5.0.0-3"
  }
}
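For what it's worth, the WHATWG ReadableStream constructor used in main() is not a global in Node 16; it is exposed (experimentally) by the stream/web module from Node 16.5 onwards, so one option is to import it explicitly (an assumption based on the node ^16.4.0 entry above):
// Node >= 16.5: the web-streams ReadableStream lives in 'stream/web'
const { ReadableStream } = require('stream/web');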
I have a code snippet in PHP which I would like to move to Node.js, but I cannot seem to find the right way to do it.
class EncryptService
{
    const PUBLIC_CERT_PATH = 'cert/public.cer';
    const PRIVATE_CERT_PATH = 'cert/private.key';
    const ERROR_LOAD_X509_CERTIFICATE = 0x10000001;
    const ERROR_ENCRYPT_DATA = 0x10000002;

    public $outEncData = null;
    public $outEnvKey = null;
    public $srcData;

    public function encrypt()
    {
        $publicKey = openssl_pkey_get_public(self::PUBLIC_CERT_PATH);
        if ($publicKey === false) {
            $publicKey = openssl_pkey_get_public("file://".self::PUBLIC_CERT_PATH);
        }
        if ($publicKey === false) {
            $errorMessage = "Error while loading X509 public key certificate! Reason:";
            while (($errorString = openssl_error_string())) {
                $errorMessage .= $errorString . "\n";
            }
            throw new Exception($errorMessage, self::ERROR_LOAD_X509_CERTIFICATE);
        }
        $publicKeys = array($publicKey);
        $encData = null;
        $envKeys = null;
        $result = openssl_seal($this->srcData, $encData, $envKeys, $publicKeys);
        if ($result === false) {
            $this->outEncData = null;
            $this->outEnvKey = null;
            $errorMessage = "Error while encrypting data! Reason:";
            while (($errorString = openssl_error_string())) {
                $errorMessage .= $errorString . "\n";
            }
            throw new Exception($errorMessage, self::ERROR_ENCRYPT_DATA);
        }
        $this->outEncData = base64_encode($encData);
        $this->outEnvKey = base64_encode($envKeys[0]);
    }
};
The problem is that I cannot find an implementation of openssl_seal in JavaScript anywhere. I do need to keep this structure because I use both outEncData and outEnvKey.
I managed to find the equivalent implementation of openssl_sign with the crypto package, but nothing for openssl_seal.
Later edit: added the working solution as an answer.
OK, I've spent some time to figure this out; in short, it is now in the repo ivarprudnikov/node-crypto-rc4-encrypt-decrypt. But we want to follow SO rules here.
The below assumes that you have a public key for encrypting the generated key, and a private key for testing that all is great.
A randomly generated secret key is used for encryption:
const crypto = require('crypto');

const generateRandomKeyAsync = async () => {
  return new Promise((resolve, reject) => {
    crypto.scrypt("password", "salt", 24, (err, derivedKey) => {
      if (err) reject(err);
      resolve(derivedKey.toString('hex'));
    });
  });
}
Encrypt the data with the generated key, and then encrypt that key with the given public key. We want to send back both the encrypted details and the encrypted key, as we expect the user on the other side to have the private key.
const crypto = require('crypto');
const path = require('path');
const fs = require('fs');

const encryptKeyWithPubAsync = async (text) => {
  return new Promise((resolve) => {
    fs.readFile(path.resolve('./public_key.pem'), 'utf8', (err, publicKey) => {
      if (err) throw err;
      const buffer = Buffer.from(text, 'utf8');
      const encrypted = crypto.publicEncrypt(publicKey, buffer);
      resolve(encrypted.toString('base64'));
    });
  });
}

const encryptStringAsync = async (clearText) => {
  const encryptionKey = await generateRandomKeyAsync();
  const cipher = await crypto.createCipheriv("RC4", encryptionKey, null);
  const encryptedKey = await encryptKeyWithPubAsync(encryptionKey);
  return new Promise((resolve, reject) => {
    let encryptedData = '';
    cipher.on('readable', () => {
      let chunk;
      while (null !== (chunk = cipher.read())) {
        encryptedData += chunk.toString('hex');
      }
    });
    cipher.on('end', () => {
      resolve([encryptedKey, encryptedData]); // return value
    });
    cipher.write(clearText);
    cipher.end();
  });
}
So now we can encrypt the details:
encryptStringAsync("foo bar baz")
  .then(details => {
    console.log(`encrypted val ${details[1]}, encrypted key ${details[0]}`);
  })
Will print something like:
encrypting foo bar baz
encrypted val b4c6c7a79712244fbe35d4, encrypted key bRnxH+/pMEKmYyvJuFeNWvK3u4g7X4cBaSMnhDgCI9iii186Eo9myfK4gOtHkjoDKbkhJ3YIErNBHpzBNc0rmZ9hy8Kur8uiHG6ai9K3ylr7sznDB/yvNLszKXsZxBYZL994wBo2fI7yfpi0B7y0QtHENiwE2t55MC71lCFmYtilth8oR4UjDNUOSrIu5QHJquYd7hF5TUtUnDtwpux6OnJ+go6sFQOTvX8YaezZ4Rmrjpj0Jzg+1xNGIIsWGnoZZhJPefc5uQU5tdtBtXEWdBa9LARpaXxlYGwutFk3KsBxM4Y5Rt2FkQ0Pca9ZZQPIVxLgwIy9EL9pDHtm5JtsVw==
To test the above assumptions, it is first necessary to decrypt the key with the private one:
const decryptKeyWithPrivateAsync = async (encryptedKey) => {
  return new Promise((resolve) => {
    fs.readFile(path.resolve('./private_key.pem'), 'utf8', (err, privateKey) => {
      if (err) throw err;
      const buffer = Buffer.from(encryptedKey, 'base64')
      const decrypted = crypto.privateDecrypt({
        key: privateKey.toString(),
        passphrase: '',
      }, buffer);
      resolve(decrypted.toString('utf8'));
    });
  });
}
After the key is decrypted, it is possible to decrypt the message:
const decryptWithEncryptedKey = async (encKey, encVal) => {
  const k = await decryptKeyWithPrivateAsync(encKey);
  const decipher = await crypto.createDecipheriv("RC4", k, null);
  return new Promise((resolve, reject) => {
    let decrypted = '';
    decipher.on('readable', () => {
      let chunk;
      while (null !== (chunk = decipher.read())) {
        decrypted += chunk.toString('utf8');
      }
    });
    decipher.on('end', () => {
      resolve(decrypted); // return value
    });
    decipher.write(encVal, 'hex');
    decipher.end();
  });
}
Hope this answers the question.
The final version that worked for me. My problem was that I used a 128-bit random key to encrypt the data; a 256-bit key worked in the end.
The encryption works in JS, and it can be decrypted in PHP with openssl_open using your private key, which was what I asked in the original question.
const crypto = require('crypto');
const { constants } = require('crypto'); // provides RSA_PKCS1_PADDING
const path = require('path');
const fs = require('fs');

const encryptMessage = (message) => {
  const public_key = fs.readFileSync(`${appDir}/certs/sandbox.public.cer`, 'utf8');
  const rc4Key = Buffer.from(crypto.randomBytes(32), 'binary'); // 32 bytes = 256 bits
  const cipher = crypto.createCipheriv('RC4', rc4Key, null);
  let data = cipher.update(message, 'utf8', 'base64');
  cipher.final();
  const encryptedKey = crypto.publicEncrypt({
    key: public_key,
    padding: constants.RSA_PKCS1_PADDING
  }, rc4Key);
  return {
    'data': data,
    'env_key': encryptedKey.toString('base64'),
  };
};
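Used like this (a sketch; appDir and the certificate path are assumed to be configured as above):
const { data, env_key } = encryptMessage('my payload');
// "data" is the RC4-encrypted payload (base64) and "env_key" is the
// RC4 key encrypted with the RSA public key (base64), mirroring the
// outEncData / outEnvKey pair produced by the PHP EncryptService.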