Read images with node.js from URL - javascript

Please excuse my ignorance on node.
I need to read an image through a url and resize it through sharp.
Currently I have it like this to read local.
For example.
I want to read this image
url= "https://miami.pfsrealty.com/wp-content/uploads/2020/02/Miami-y-su-bahia-con-nubes-al-atardecer-Compressed.jpg"
And my current code is this.
// Original (question) code: reads a local file as base64, decodes it,
// and resizes it with sharp, resolving with the resized base64 string.
// NOTE(review): `const height: number` / `width: number` are TypeScript
// annotations — this snippet is TS, not plain JS.
// NOTE(review): `new Buffer(...)` is deprecated; `Buffer.from(...)` is
// the modern equivalent. The base64 round-trip is unnecessary:
// readFileSync with no encoding already returns a Buffer.
// NOTE(review): an async executor in `new Promise` is an anti-pattern —
// a throw inside it is not caught by this promise.
return new Promise(async (resolve, reject) => {
const fileSystem = require('fs');
// `directoryPath` comes from the enclosing (unshown) function.
const image = fileSystem.readFileSync(directoryPath, 'base64');
const sharp = require('sharp');
const height: number = parseInt(heightString);//parameter
const width: number = parseInt(widthString);//parameter
// Decode the base64 string back into a Buffer for sharp.
let img = new Buffer(image, 'base64');
// NOTE(review): sharp's signature is resize(width, height) — the
// arguments look swapped here; TODO confirm intent.
await sharp(img)
.resize(height, width)
.toBuffer()
.then(resizedImageBuffer => {
const resizedImageData = resizedImageBuffer.toString('base64');
resolve(resizedImageData);
})
.catch(error => {
// error handling
reject(error);
});
});
How should the call be?
Thanks !

try this
const sharp = require('sharp');
const fs = require('fs');

/**
 * Downloads the image at `url`, resizes it with sharp, and writes the
 * result to ./<original file name>.
 *
 * @param {string} url    - HTTP(S) URL of the source image.
 * @param {number} width  - Target width in pixels (default 300).
 * @param {number} height - Target height in pixels (default 300).
 * @returns {Promise<void>} resolves once the resized file is written.
 */
function readAndSave(url, width = 300, height = 300) {
  // Output name = last path segment of the URL.
  const filename = url.replace(/^.*[\\\/]/, '');
  return require('axios')
    .get(url, { responseType: 'arraybuffer' })
    // The response is already binary — the original's "utf-8" argument
    // was ignored/misleading.
    .then((response) => Buffer.from(response.data))
    .then((buffer) =>
      // Bug fix: sharp's signature is resize(width, height); the
      // original passed them swapped. The base64 encode/decode
      // round-trip was also dropped — same bytes either way.
      sharp(buffer).resize(width, height).toBuffer()
    )
    .then(
      (resizedImageBuffer) =>
        new Promise((resolve, reject) => {
          // Bug fix: the original wrote to `./$(unknown)` — template
          // placeholders use ${...}, and `filename` was never used.
          fs.writeFile(`./${filename}`, resizedImageBuffer, (err) => {
            if (err) reject(err);
            else resolve();
          });
        })
    )
    .catch((error) => {
      console.log('error', error);
    });
}
readAndSave('https://miami.pfsrealty.com/wp-content/uploads/2020/02/Miami-y-su-bahia-con-nubes-al-atardecer-Compressed.jpg');

Related

How to get image dimensions on Cloudflare Workers?

I'm trying to create a Cloudflare Worker that receives an image URL and return width and height. But I receive the message ReferenceError: Image is not defined for new Image(). Is there a workaround to make the code below work?
// Cloudflare Worker (question code): tries to measure an image with the
// DOM `Image` API — which does not exist in the Workers runtime, hence
// the ReferenceError marked below.
addEventListener('fetch', event => {
event.respondWith(handleRequest(event.request))
})
// Fetches `imageUrl` (from the query string) and attempts to load it
// into an Image to read its width/height.
async function handleRequest(request) {
let imageUrl = new URL(request.url).searchParams.get('imageUrl')
let response = await fetch(imageUrl)
let blob = await response.blob()
let image = new Image() // <------ ReferenceError: Image is not defined
return new Promise((resolve, reject) => {
// Would resolve with the dimensions as JSON once the image decoded —
// never reached, because `new Image()` throws above.
image.onload = function() {
resolve(new Response(JSON.stringify({
width: image.width,
height: image.height
}), {
headers: {
'content-type': 'application/json'
}
}))
}
image.onerror = reject
image.src = URL.createObjectURL(blob)
})
}
It seems other functions for images have a similar problem. Example: ReferenceError: createImageBitmap is not defined. So any possible solution would be great.
`Image` is only available in the browser.
If you want to get the width and height of an image in node.js try this
const http = require('http');
const https = require('https');
const fs = require('fs');
const gm = require('gm');

/**
 * Downloads an image to disk, reads its dimensions with GraphicsMagick,
 * and resolves with a JSON `Response` holding { width, height }.
 * Note: the global `Response` requires Node 18+ (or a Workers-like
 * runtime).
 */
async function handleRequest() {
  const filename = "image.png"
  const fileURL = "https://any-page.dom/" + filename
  // Bug fix: http.get() cannot fetch https:// URLs (it throws
  // "Protocol https: not supported") — pick the client by URL scheme.
  const client = fileURL.startsWith('https:') ? https : http;
  const file = fs.createWriteStream(filename);
  return new Promise((resolve, reject) => {
    const request = client.get(fileURL, function (response) {
      response.pipe(file);
      // after download completed close filestream
      file.on("finish", () => {
        console.log("Download Completed");
        file.close();
        gm(filename)
          .size(function (err, size) {
            if (!err) {
              console.log('width = ' + size.width);
              console.log('height = ' + size.height);
              resolve(new Response(JSON.stringify({
                width: size.width,
                height: size.height
              }), {
                headers: {
                  'content-type': 'application/json'
                }
              }))
            } else {
              reject(err)
            }
          });
      });
    });
    // Bug fix: surface network errors instead of leaving the promise
    // pending forever.
    request.on('error', reject);
  });
}

How to create a broken stream for a test?

I have this function and I want to test it. I want to test stream.on('error', err => reject(err)); line, but don't know how to reach it. What could I input in this function to trigger the error throw? Thank you!
/**
 * Drains a readable stream and resolves with its entire contents as a
 * UTF-8 string; rejects if the stream emits an error.
 */
function streamToString(stream) {
  return new Promise((resolve, reject) => {
    const pieces = [];
    stream.on('data', (chunk) => {
      pieces.push(Buffer.from(chunk));
    });
    stream.on('error', (err) => {
      reject(err);
    });
    stream.on('end', () => {
      resolve(Buffer.concat(pieces).toString('utf8'));
    });
  });
}
If you are in Node.js and using Jest, you can emit the error yourself, like this:
/**
 * Buffers every chunk from `stream` and resolves with their UTF-8
 * concatenation once the stream ends; rejects on stream error.
 */
function streamToString(stream) {
  const collected = [];
  const onChunk = (chunk) => collected.push(Buffer.from(chunk));
  return new Promise((resolve, reject) => {
    stream.on("data", onChunk);
    stream.on("error", reject);
    stream.on("end", () => resolve(Buffer.concat(collected).toString("utf8")));
  });
}
// Jest: force the rejection path by emitting 'error' ourselves right
// after wiring the stream into streamToString.
test("broken stream", () => {
  const fs = require("fs");
  const readable = fs.createReadStream("file.txt");
  const pending = streamToString(readable);
  readable.emit("error", "OH NO!");
  return expect(pending).rejects.toMatch("OH NO!");
});
//or
// Jest: same idea, but let destroy() surface the error instead of
// emitting it manually.
test("broken stream", () => {
  const fs = require("fs");
  const readable = fs.createReadStream("file.txt");
  const pending = streamToString(readable);
  readable.destroy("OH NO!");
  return expect(pending).rejects.toMatch("OH NO!");
});

Send ArrayBuffer to S3 put to signedURL

I am progressively loading a file into a buffer, the buffer is valid, but the browser crashes when the ArrayBuffer is finished loading the file into it. What I need to do is to be able to send the pieces of the buffer buf = this.concatBuffers(buf, buffer); to the axios PUT request so I can progressively upload the file to s3, rather than load it into a single variable returned by the promise (as the memory gets exceeded).
How do I modify the link between readFileAsBuffer and the uploadFileToS3 method to do this?
This is my code so you can follow the process.
// Returns a new typed array (same constructor as `a`) containing a's
// elements followed by b's.
concatTypedArrays = (a, b) => {
  const joined = new a.constructor(a.length + b.length);
  joined.set(a, 0);
  joined.set(b, a.length);
  return joined;
};
// Concatenates two ArrayBuffers (or typed-array views — `.buffer || x`
// accepts either) into a single new ArrayBuffer.
concatBuffers = (a, b) => {
  const left = new Uint8Array(a.buffer || a);
  const right = new Uint8Array(b.buffer || b);
  return this.concatTypedArrays(left, right).buffer;
};
// Reads `file` chunk-by-chunk (2097152 bytes = 2 MiB per chunk via the
// project's FileChunker helper — TODO confirm its API) and resolves with
// one ArrayBuffer holding the whole file.
// NOTE(review): each iteration re-concatenates the accumulator, so the
// whole file (plus a temporary copy) ends up in memory — consistent with
// the browser crash described in the question.
readFileAsBuffer = file =>
new Promise((resolve, reject) => {
const fileReader = new FileReader();
fileReader.file = file;
// Accumulator for all chunks read so far.
let buf = new ArrayBuffer();
const fileChunks = new FileChunker(file, 2097152);
// Kick off the first chunk read; onload re-arms itself for the rest.
fileReader.readAsArrayBuffer(fileChunks.blob());
fileReader.onload = e => {
this.onProgress(fileChunks);
const buffer = e.target.result;
buf = this.concatBuffers(buf, buffer);
if (fileChunks.hasNext()) {
fileChunks.next();
fileReader.readAsArrayBuffer(fileChunks.blob());
return;
}
// Last chunk appended — hand back the full buffer.
resolve(buf);
};
fileReader.onerror = err => {
reject(err);
};
});
// Reads fileObject.fileRef into a buffer and PUTs it to the pre-signed
// S3 URL, reporting progress via this.setState. Resolves with the
// response body.
uploadFileToS3 = fileObject => {
  // Bug fix: the promise is now *returned* — the original dropped it,
  // so `this.uploadFileToS3(...).then(...)` crashed on undefined.
  return new Promise((resolve, reject) => {
    const decodedURL = decodeURIComponent(fileObject.signedURL);
    this.readFileAsBuffer(fileObject.fileRef)
      .then(fileBuffer => {
        console.log(fileBuffer);
        axios
          .put(decodedURL, fileBuffer, {
            headers: {
              'Content-Type': fileObject.mime,
              'Content-MD5': fileObject.checksum,
              'Content-Encoding': 'UTF-8',
              'x-amz-acl': 'private',
            },
            onUploadProgress: progressEvent => {
              const { loaded, total } = progressEvent;
              // Integer percentage (0-100) for the progress bar.
              const uploadPercentage = parseInt(
                Math.round((loaded * 100) / total),
                10,
              );
              this.setState({ uploadProgress: uploadPercentage });
              console.log(`${uploadPercentage}%`);
              if (uploadPercentage === 100) {
                console.log('complete');
              }
            },
          })
          .then(response => {
            resolve(response.data);
          })
          .catch(error => {
            reject(error);
          });
      })
      // Bug fix: a failure while reading the file previously left the
      // returned promise pending forever.
      .catch(error => {
        reject(error);
      });
  });
};
// Uploads every file in this.state.files and clears the list when all
// uploads have finished. Resolves with the array of upload responses.
uploadAllFilesToS3 = () => {
  const { files } = this.state;
  // Bug fixes vs. the original:
  //  - the promise is now returned (the original built a `new Promise`
  //    and dropped it, so callers could never await completion);
  //  - Promise.all waits for *every* upload — the original resolved as
  //    soon as the first file finished;
  //  - the original resolved with `response.data`, but uploadFileToS3
  //    already resolves with the response data, so that was undefined.
  return Promise.all(
    Object.keys(files).map(idx => this.uploadFileToS3(files[idx])),
  ).then(responses => {
    this.setState({ files: [] });
    return responses;
  });
};
// Computes the MD5 of `file` incrementally (2 MiB chunks through
// SparkMD5) and resolves with the digest re-encoded as base64 — the
// format S3 expects in the Content-MD5 header.
calcFileMD5 = file =>
new Promise((resolve, reject) => {
const fileReader = new FileReader();
fileReader.file = file;
const spark = new SparkMD5.ArrayBuffer();
const fileChunks = new FileChunker(file, 2097152);
// First chunk; onload re-arms itself until the chunker is exhausted.
fileReader.readAsArrayBuffer(fileChunks.blob());
fileReader.onload = e => {
this.onProgress(fileChunks);
const buffer = e.target.result;
spark.append(buffer);
if (fileChunks.hasNext()) {
fileChunks.next();
fileReader.readAsArrayBuffer(fileChunks.blob());
return;
}
// All chunks hashed: hex digest -> raw bytes -> base64 for AWS.
const hash = spark.end();
const checksumAWS = Buffer.from(hash, 'hex').toString('base64');
resolve(checksumAWS);
};
fileReader.onerror = err => {
reject(err);
};
});
I ended up not needing to create my own Buffer of the file, instead if I post the fileReference returned by the input directly to axios (or xhr) the request automatically chunked the upload.
Initially I could only make it work with XMLHttpRequest, but I quickly found a way to wrap this around axios which neatens the logic.
XMLHttpRequest
// Raw-XHR variant: PUT the File object itself so the browser streams it.
// NOTE(review): `createCORSRequest`, `url`, `file` and
// `md5_base64_binary` are defined elsewhere in the poster's code — this
// fragment is not self-contained.
const xhr = createCORSRequest('PUT', url);
if (!xhr) {
console.log('CORS not supported');
} else {
xhr.onload = function(){
if(xhr.status == 200) {
console.log('completed');
} else {
console.log('Upload error: ' + xhr.status);
}
};
xhr.onerror = function(err) {
console.log(err)
};
// Progress events fire as the browser streams the file up.
xhr.upload.onprogress = function(progressEvent){
console.log(progressEvent);
};
xhr.setRequestHeader('Content-Type', file.type);
xhr.setRequestHeader('Content-MD5', md5_base64_binary);
xhr.setRequestHeader('Content-Encoding', 'UTF-8');
xhr.setRequestHeader('x-amz-acl', 'private');
// Sending the File directly lets the browser handle chunked reading.
xhr.send(file);
}
Or using axios;
// PUTs the File reference straight to the pre-signed S3 URL via axios,
// letting the browser stream the file; resolves with the response body.
uploadFileToS3 = fileObject => {
  // Anti-pattern removed: axios.put already returns a promise, so it is
  // returned directly instead of being wrapped in `new Promise(...)`.
  // The unused `enqueueSnackbar` destructuring was dropped as well.
  const decodedURL = decodeURIComponent(fileObject.signedURL);
  return axios
    .put(decodedURL, fileObject.fileRef, {
      headers: {
        'Content-Type': fileObject.mime,
        'Content-MD5': fileObject.checksum,
        'Content-Encoding': 'UTF-8',
        'x-amz-acl': 'private',
      },
      onUploadProgress: progressEvent => {
        const { loaded, total } = progressEvent;
        // Integer percentage (0-100) for the progress bar.
        const uploadPercentage = parseInt(
          Math.round((loaded * 100) / total),
          10,
        );
        this.setState({ uploadProgress: uploadPercentage });
      },
    })
    .then(response => response.data);
};
Have you tried uploading your file using formData? Let the browser deal with file reading.
// Sketch: send FormData and let the browser handle the file reading.
// NOTE(review): `....` below is the poster's placeholder for the rest of
// the axios config — this line is not valid JavaScript as written.
const data = new FormData()
data.append('file', file)
axios.put(decodedURL, data, ....)
Another option is to use axios https://github.com/axios/axios#request-config transformRequest property. And call for file reading there.

NODE.JS : How to make sure a reading stream has ended and the data written?

so I am new to async/await on node.js and I could use some help figuring out this code.
I'm trying to get a file from a ftp server via the 'ftp' package, to write the data into a local 'data.txt' and to open it later in the code. My problem is that I don't understand how to make sure the file is completely written in the 'data.txt' before trying to open it with fs.readFileSync().
// Question code: download a file over FTP into data.txt, then read it.
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient();
FtpClient.on('ready', async function() {
await new Promise(resolve =>
FtpClient.get('the ftp file directory', (err, stream) => {
if (err) throw err;
stream.once('close', () => {FtpClient.end();});
// Stream written in data.txt
const Streampipe = stream.pipe(fs.createWriteStream('data.txt')).on('finish', resolve)
})
)
})
FtpClient.connect(ConfigFTP);
// NOTE(review): this line runs synchronously, long before the async
// 'ready' handler above has downloaded anything — exactly the race the
// question describes. The read must happen inside the handler, after
// the 'finish' event.
var Data = fs.readFileSync('data.txt', 'utf8');
I'm not sure exactly what you want to accomplish, but you can do it in either of these two ways:
1)
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient()

// Open the destination up front so the 'finish' listener below can be
// attached before the download ever starts.
const writeStream = fs.createWriteStream('data.txt')

FtpClient.on('ready', function () {
  FtpClient.get('the ftp file directory', (err, remoteStream) => {
    if (err) throw err
    // Close the FTP connection once the remote stream is done.
    remoteStream.once('close', () => { FtpClient.end() })
    // Pipe the remote file into data.txt.
    remoteStream.pipe(writeStream)
  })
})

FtpClient.connect(ConfigFTP)

// 'finish' fires only after every byte has been flushed to data.txt,
// so reading it back here is safe.
writeStream.on('finish', () => {
  const Data = fs.readFileSync('data.txt', 'utf8')
})
2)
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient()

FtpClient.on('ready', async function () {
  // Block inside the handler until the pipe reports 'finish', i.e. the
  // whole remote file has been flushed to data.txt.
  await new Promise((resolve) => {
    FtpClient.get('the ftp file directory', (err, remoteStream) => {
      if (err) throw err
      // Close the FTP connection when the remote stream ends.
      remoteStream.once('close', () => { FtpClient.end() })
      remoteStream
        .pipe(fs.createWriteStream('data.txt'))
        .on('finish', resolve)
    })
  })
  // Safe to read now — the await above guarantees the write completed.
  var Data = fs.readFileSync('data.txt', 'utf8')
})

FtpClient.connect(ConfigFTP)

How to decrypt a string in NodeJS

I am currently working on a project that takes incoming messages from Amazons SQS (Simple Queue Service).
I have established the connection to the SQS service and receiving messages.
The receiving message is in the following format:
{ MessageId: '124a42b-657d-481d-348f-ddd9b8d8b143b',
ReceiptHandle: 'AQEBSSVzlCbqsSUQ3E.....',
MD5OfBody: '7ba46c7c8874fc6d0c4a141a2d3d4d5a721',
Body: '10987235#PUBLISH;aSeIgjS78Iy4KRZHSeAy43...' }
The Message Body is encrypted, I been given the password to decrypt it.
My question is how do I decrypt the message Body in nodeJS?
Any examples would be much appreciated!
UPDATE:
Here is a copy of my code so far:
const Consumer = require('sqs-consumer');
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require("path");
const unzipper = require('unzipper');
// Polls the SQS queue; each message body is appended to a zip file on
// disk and then extracted.
// NOTE(review): `done` is never called, so with sqs-consumer's callback
// API the message is presumably never acknowledged — verify against the
// library version in use.
const app = Consumer.create({
queueUrl: 'https://sqs.us-west-1.amazonaws.com/QueueName',
handleMessage: (message, done) => {
saveToFile(message.Body)
.then(() => {
return unzipFile();
})
.then((success) => {
// read content of XML file and save to DB
console.log('success:', success);
})
.catch(e => {
console.log('Error:', e);
});
},
sqs: new AWS.SQS()
});
// Connection/credential errors from the consumer itself.
app.on('error', (err) => {
console.log(err.message);
});
app.start();
// Appends `message` to ./zips/decodeMsg.zip and resolves when the write
// completes. (The stray `debugger;` statement was removed.)
const saveToFile = (message) => {
  console.log('saveToFile fired!');
  return new Promise((resolve, reject) => {
    fs.appendFile(path.resolve(__dirname) + '/zips/decodeMsg.zip', message, (err) => {
      // Bug fix: the original called reject(error) — `error` was
      // undefined there, so a write failure threw a ReferenceError
      // instead of rejecting the promise.
      if (err) reject(err);
      else resolve();
    });
  });
}
// Opens the saved zip and streams its first entry out to 'readablefile'.
const unzipFile = () => {
return unzipper.Open.file(path.resolve(__dirname) + '/zips/decodeMsg.zip') // Fails To Read The File Here.
.then(function (directory) {
console.log('directory', directory);
return new Promise(function (resolve, reject) {
// NOTE(review): `password` is not defined anywhere in this snippet —
// it must come from elsewhere (e.g. ./config) or this line throws a
// ReferenceError before the archive is even read.
directory.files[0].stream(password) // PASSING IN PASSWORD HERE
.pipe(fs.createWriteStream('readablefile')) // should be XML
.on('error', reject)
.on('finish', resolve)
});
});
}
Update again:
Okay here I am trying to just decrypt the message Body with the following code.
var crypto = require("crypto")
require('./config');
// Decrypts hex-encoded AES-256-CBC ciphertext with a password.
// NOTE(review): crypto.createDecipher is deprecated (and removed in
// modern Node) — it derives key+IV from the password internally; new
// code should use crypto.createDecipheriv with an explicit key and IV.
function decrypt(key, data) {
var decipher = crypto.createDecipher('aes-256-cbc', key);
// 'hex' here means `data` MUST be a hex string — passing base64 or raw
// text is what produces the "Bad input string" TypeError quoted below.
var decrypted = decipher.update(data, 'hex', 'utf-8');
decrypted += decipher.final('utf-8');
return decrypted;
}
// NOTE(review): `decryptedText` is assigned without declaration — an
// implicit global in sloppy mode.
decryptedText = decrypt(process.env.password, process.env.encryptedMessage);
console.log("Decrypted Text: " + decryptedText);
However I am getting the following error in the console:
var ret = this._handle.update(data, inputEncoding);
^
TypeError: Bad input string
at TypeError (native)
at Decipher.Cipher.update (crypto.js:145:26)
at decrypt (/Users/dave/Tests/sqs-consumer/app.js:6:34)

Categories

Resources