how to embed an image in a JSON response - javascript

I'm using Jimp to read in a JSON payload where the image node is a base64-encoded JPEG.
I'm able to successfully convert it to a TIFF and save it:
Jimp.read(Buffer.from(inputImage, "base64"), function(err, image) {
    image.getBuffer(Jimp.MIME_TIFF, function(error, tiff) {
        context.bindings.outputBlob = tiff
        ...
    });
});
However, when I attempt to embed the TIFF inside a JSON object, the TIFF comes out garbled:
const response = {
    image: tiff.toString('base64'),
    correlation: correlation
};
context.bindings.outputBlob = response;
Here's the full code:
const Jimp = require("jimp");

module.exports = function(context, myBlob) {
    const correlation = context.bindings.inputBlob.correlation;
    const inputImage = context.bindings.inputBlob.image;
    const imageName = context.bindings.inputBlob.imageName;

    context.log(
        correlation + "Attempting to convert this image to a tiff: " + imageName
    );

    Jimp.read(Buffer.from(inputImage, "base64"), function(err, image) {
        image.getBuffer(Jimp.MIME_TIFF, function(error, tiff) {
            const response = {
                image: tiff.toString('base64'),
                correlation: correlation
            };
            context.bindings.outputBlob = response;
            context.log(
                correlation + "Successfully converted " + imageName + " to tiff."
            );
            context.done();
        });
    });
};
How do we embed the TIFF inside a JSON payload?
If this output format is non-negotiable, how would I render the TIFF from the saved payload?

Well, since you confirmed you are looking for output with context.res, here is my working sample. Note that there is a maximum response size, so you can't return every image/file the way I am returning the image here.
const Jimp = require('jimp')

module.exports = async function (context, req)
{
    let response = {}
    try
    {
        let url = 'https://noahwriting.com/wp-content/uploads/2018/06/APPLE-300x286.jpg'
        //call function to download and resize image
        response = await resizeImage(url)
    }
    catch (err)
    {
        response.type = 'application/json'
        if (err.response == undefined)
        {
            context.log(err)
            response.status = 500
            response.data = err
        }
        else
        {
            response.data = err.response.data
            response.status = err.response.status
            context.log(response)
        }
    }
    //response (fall back to the error payload and status when there is no image buffer)
    context.res =
    {
        status: response.status || 200,
        headers: { 'Content-Type': response.type },
        body: response.buf || response.data
    }
}
async function resizeImage(url)
{
    //read image to buffer
    let image = await Jimp.read(url)
    //resize image
    image.resize(300, Jimp.AUTO)
    //save to buffer
    let image_buf = await image.getBufferAsync(image.getMIME())
    //image.getMIME() returns something like `image/jpeg`, which is a valid Content-Type for responses.
    return { 'buf': image_buf, 'type': image.getMIME() }
}
(Off-topic, but I saw that you are using Blob Storage, so..) if you plan on storing photos/files/anything in Azure Blob Storage and you want to retrieve them in some systematic way, you will find out very fast that you can't query the storage and you have to deal with ugly XML. My workaround was to create a function that stores photos/files in Blob Storage but then saves the URL path to the file, along with the file name and any other attributes, to a Mongo store. Then I can make super fast queries to retrieve an array of links, which point to the respective files.
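A minimal sketch of that pattern, assuming the @azure/storage-blob and mongodb packages and hypothetical container, database, and collection names:

const { BlobServiceClient } = require('@azure/storage-blob');
const { MongoClient } = require('mongodb');

// Upload a buffer to Blob Storage, then record its URL and metadata in
// MongoDB so the files can be queried later without touching the blob API.
async function storeFile(buffer, fileName, mimeType) {
    const blobService = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
    const container = blobService.getContainerClient('uploads');
    const blockBlob = container.getBlockBlobClient(fileName);
    await blockBlob.uploadData(buffer, { blobHTTPHeaders: { blobContentType: mimeType } });

    const mongo = await MongoClient.connect(process.env.MONGO_URL);
    try {
        await mongo.db('files').collection('metadata').insertOne({
            name: fileName,
            type: mimeType,
            url: blockBlob.url, // the link that gets returned by queries
            uploadedAt: new Date()
        });
    } finally {
        await mongo.close();
    }
    return blockBlob.url;
}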

Related

How to store PDF in mongodb?

I was successful in storing images in MongoDB using the base64 URL, but when I tried to do the same with PDFs, it gave a URL which does not work. Let me explain: when I put the image's base64 URL in the req.body of the POST request, the special characters would disappear, so I used the encodeURIComponent() method to make it error-free. After that I found that the huge string exceeded the default body-size limit, so I tried: app.use(express.json({limit: '50mb'})); app.use(express.urlencoded({limit: '50mb', extended: false })); and it worked! But when the client requests the base64 URL, it comes back encoded, so I added decodeURIComponent() to decode it; that was not a big issue and I got the desired result, at least with the image.
The main issue is with PDFs. I don't know why it's happening with PDFs only! When I make the base64 URL on the client side and test it, it works fine, but when it gets to the server side, everything falls apart. Please help me deal with this.
Note: "I don't want to use GridFS, formidable, multer, etc. for file handling."
here's my piece of code:
$('#seasonForm').submit(async function (e) {
    e.preventDefault();
    const form = $(this);
    const ImgFile = document.getElementById('seasonThumbnail').files[0];
    const PDFFile = document.getElementById('seasonPDF').files[0];
    const imgurl = encodeURIComponent(await getBase64(ImgFile));
    const PDFurl = encodeURIComponent(await getBase64(PDFFile));
    const url = '/uploadSeason';
    $.ajax({
        type: "POST",
        url: url,
        data: form.serialize() + `&Version=<%- NxtSeasons %>&image=${imgurl}&PDF=${PDFurl}`,
        success: data => {
            console.log(data.message);
            if (data.status == "error") {
                showIt(".alert", data.message, "error");
            } else {
                showIt(".alert", data.message, "success");
            }
        }
    });
})
Don't get confused by getBase64() and showIt(); these are my own functions. getBase64() is a promise which returns the base64 URL of the file, and showIt() is a type of alert which I made. If you don't know what a base64 URL is, here is getBase64's code:
const getBase64 = (file) => {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.readAsDataURL(file);
        reader.onload = () => resolve(reader.result);
        reader.onerror = error => reject(error);
    });
}
My Node.js code:
app.post("/uploadSeason", admin, async (req, res) => {
    try {
        const { Name, Desctiption, Version, image, PDF } = req.body;
        const newSeason = new Season({
            Name,
            Desctiption,
            Version,
            image: encodeURIComponent(image),
            PDF: encodeURIComponent(PDF),
        });
        await newSeason.save();
        res.json({
            status: "success",
            message: "Season added successfully"
        });
    } catch (e) {
        console.log(e);
        res.json({
            status: "error",
            message: e
        });
    }
});
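For what it's worth, one alternative that stays within the stated constraints (no GridFS, formidable, or multer) is to skip the URI-encoding round trips and store the raw bytes instead, since Mongoose supports Buffer fields and MongoDB documents can hold up to 16 MB. A minimal sketch, with a hypothetical schema mirroring the fields above:

const mongoose = require('mongoose');

// Hypothetical schema: store raw bytes instead of URI-encoded data URLs.
const seasonSchema = new mongoose.Schema({
    Name: String,
    Desctiption: String,
    Version: String,
    image: Buffer,
    PDF: Buffer,
});

// Convert an incoming data URL (e.g. "data:application/pdf;base64,JVBERi0...")
// into a Buffer before saving.
function dataUrlToBuffer(dataUrl) {
    const base64Body = dataUrl.split(',')[1]; // drop the "data:...;base64," prefix
    return Buffer.from(base64Body, 'base64');
}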

Getting the PDF blob from a URL and inserting it into Drive directly using the Puppeteer library and fetch

I'm trying to use Puppeteer to log in to a website and "download" a PDF directly to my Drive. I've managed to reach the PDF page with Puppeteer, and I tried (among other things) to get the blob using fetch with the cookies, to send to Drive. I can't post the login information here, but if you could help me look for an error (or more) in the code it would be great! For now, it goes to the page before the PDF, gets the link, fetches with the cookies, and inserts a PDF in Drive, but the PDF is corrupted at 0 KB.
I tried setRequestInterception, getPdf (from Puppeteer), and using a buffer with some things I found during my research.
//Page before pdfPage. Here I got the link: urlPdf
//await page.goto(urlPdf);
//await page.waitForNavigation();
//const htmlPdf = await page.content();
const cookies = await page.cookies()
const opts = {
    headers: {
        cookie: cookies
    }
};
let blob = await fetch(urlPdf, opts).then(r => r.blob());
console.log("pegou o blob") // "got the blob"
// upload file in specific folder
var file;
console.log("driveApi upload reached")
function blobToFile(req) {
    file = req.body.blob
    //A Blob() is almost a File() - it's just missing the two properties below, which we will add
    file.lastModifiedDate = new Date();
    file.name = 'teste.pdf'; //req.body.word;
    return file;
}
var folderId = myFolderId;
var fileMetadata = {
    'name': 'teste.pdf',
    parents: [folderId]
};
var media = {
    mimeType: 'application/pdf',
    body: file
};
drive.files.create({
    auth: jwToken,
    resource: fileMetadata,
    media: media,
    fields: 'id'
}, function (err, file) {
    if (err) {
        // Handle error
        console.error(err);
    } else {
        console.log('File Id: ', file.data.id);
    }
});
I tried many things, but the final solution I came up with is posted here:
Puppeteer - How can I get the current page (application/pdf) as a buffer or file?
await page.setRequestInterception(true);
page.on('request', async request => {
    if (request.url().indexOf('exibirFat.do') > 0) { //This condition is true only on the pdf page (in my case, of course)
        const options = {
            encoding: null,
            method: request._method,
            uri: request._url,
            body: request._postData,
            headers: request._headers
        }
        /* add the cookies */
        const cookies = await page.cookies();
        options.headers.Cookie = cookies.map(ck => ck.name + '=' + ck.value).join(';');
        /* resend the request */
        const response = await request_client(options);
        //console.log(response); // PDF Buffer
        buffer = response;
        let filename = 'file.pdf';
        fs.writeFileSync(filename, buffer); //Save file
    } else {
        request.continue();
    }
});
This solution needs: const request_client = require('request-promise-native');
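Since the original goal was sending the PDF to Drive without touching the disk, a minimal sketch of feeding the intercepted buffer straight into drive.files.create (reusing jwToken, myFolderId, and the drive client from the question; the stream wrapper is the only new piece) could be:

const stream = require('stream');

// Wrap the PDF buffer in a readable stream; the Drive client accepts a
// stream as the media body, so nothing has to be written to disk first.
function uploadBufferToDrive(buffer) {
    const body = new stream.PassThrough();
    body.end(buffer);
    drive.files.create({
        auth: jwToken,
        resource: { name: 'teste.pdf', parents: [myFolderId] },
        media: { mimeType: 'application/pdf', body: body },
        fields: 'id'
    }, function (err, file) {
        if (err) {
            console.error(err);
        } else {
            console.log('File Id: ', file.data.id);
        }
    });
}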

Download and upload image without saving to disk

Using Node.js, I am trying to get an image from a URL and upload that image to another service without saving the image to disk. I have the following code, which works when saving the file to disk and using fs to create a readable stream. But as I am running this as a cron job on a read-only file system (webtask.io), I want to achieve the same result without saving the file to disk temporarily. Shouldn't that be possible?
request(image.Url)
    .pipe(
        fs
            .createWriteStream(image.Id)
            .on('finish', () => {
                client.assets
                    .upload('image', fs.createReadStream(image.Id))
                    .then(imageAsset => {
                        resolve(imageAsset)
                    })
            })
    )
Do you have any suggestions for how to achieve this without saving the file to disk? The upload client accepts the following:
client.asset.upload(type: 'file' | 'image', body: File | Blob | Buffer | NodeStream, options = {}): Promise<AssetDocument>
Thanks!
How about passing the buffer down to the upload function, since per your statement it'll accept a buffer?
As a side note: this will keep the whole file in memory for the duration of the method execution, so if you call this numerous times you might run out of resources.
request
    .get(url)
    .on('response', function (res) {
        var data = [];
        res.on('data', function (chunk) {
            data.push(chunk);
        }).on('end', function () {
            var buffer = Buffer.concat(data);
            // Pass the buffer (per the signature above: type, then body)
            client.asset.upload('image', buffer);
        });
    });
I tried various libraries, and it turns out that node-fetch provides a way to return a buffer. So this code works:
fetch(image.Url)
    .then(res => res.buffer())
    .then(buffer => client.assets
        .upload('image', buffer, { filename: image.Id }))
    .then(imageAsset => {
        resolve(imageAsset)
    })
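One caveat: newer node-fetch releases deprecate res.buffer() in favor of res.arrayBuffer(), so the equivalent under that API would be:

fetch(image.Url)
    .then(res => res.arrayBuffer())
    .then(arrayBuffer => client.assets
        .upload('image', Buffer.from(arrayBuffer), { filename: image.Id }))
    .then(imageAsset => {
        resolve(imageAsset)
    })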
Well, I know it has been a few years since the question was originally asked, but I have encountered this problem now, and since I didn't find an answer with a comprehensive example I made one myself.
I'm assuming that the file path is a valid URL and that the end of it is the file name. I need to pass an API key to this API endpoint, and a successful upload sends me back a response with a token.
I'm using node-fetch and form-data as dependencies.
const fetch = require('node-fetch');
const FormData = require('form-data');

const secretKey = 'secretKey';

const downloadAndUploadFile = async (filePath) => {
    const fileName = new URL(filePath).pathname.split("/").pop();
    const endpoint = `the-upload-endpoint-url`;
    const formData = new FormData();
    let jsonResponse = null;
    try {
        const download = await fetch(filePath);
        const buffer = await download.buffer();
        if (!buffer) {
            console.log('file not found', filePath);
            return null;
        }
        formData.append('file', buffer, fileName);
        const response = await fetch(endpoint, {
            method: 'POST', body: formData, headers: {
                ...formData.getHeaders(),
                "Authorization": `Bearer ${secretKey}`,
            },
        });
        jsonResponse = await response.json();
    } catch (error) {
        console.log('error on file upload', error);
    }
    return jsonResponse ? jsonResponse.token : null;
}
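A usage sketch, with a hypothetical file URL (the function resolves to the API's token, or null on failure):

downloadAndUploadFile('https://example.com/files/report.pdf')
    .then(token => console.log('upload token:', token));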

Send a file from mobile to a Node.js server

I'm building an application with React Native. Now I'm trying to send an image from the mobile device to the server (Node.js). For this I'm using react-native-image-picker. The problem is that when I send the image, the server saves a file but it's empty; it doesn't contain the photo. I think the problem is probably that the server can't access the path of the image because it is on a different device. But I don't know how to solve it.
React-Native:
openImagePicker() {
    const options = {
        title: 'Select Avatar',
        storageOptions: {
            skipBackup: true,
            path: 'images'
        }
    }
    ImagePicker.showImagePicker(options, (imagen) => {
        if (imagen.didCancel) {
            console.log('User cancelled image picker');
        }
        else if (imagen.error) {
            console.log('ImagePicker Error: ', imagen.error);
        }
        else if (imagen.customButton) {
            console.log('User tapped custom button: ', imagen.customButton);
        }
        else {
            let formdata = new FormData();
            formdata.append("file[name]", imagen.fileName);
            formdata.append("file[path]", imagen.path);
            formdata.append("file[type]", imagen.type);
            fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
                method: 'PUT',
                headers: {
                    'Content-Type': 'multipart/form-data'
                },
                body: formdata
            })
            .then(response => {
                console.log("ok");
            })
            .catch(function (err) {
                console.log(err);
            })
        }
    })
}
Node.js:
addPhotoUser = function (req, res) {
    User.findById(req.params.id, function (err, user) {
        fs.readFile(req.body.file.path, function (err, data) {
            var pwd = 'home/ubuntu/.../';
            var newPath = pwd + req.body.file.name;
            fs.writeFile(newPath, data, function (err) {
                imageUrl: URL + req.body.file.name;
                user.save(function (err) {
                    if (!err) {
                        console.log('Updated');
                    } else {
                        console.log('ERROR: ' + err);
                    }
                    res.send(user);
                });
            });
        });
    });
};
Yes, the problem is that the file path is on the local device and not the server. You want to send the actual data returned to you by react-native-image-picker, not the URI. That library encodes the data with base64, so you'll want to send that to your server, not the URI returned from the library, because it won't be accessible on a remote server.
What this means is that you won't be reading any files on your server; instead you'll just decode a base64 string from the request body and write that to your filesystem.
For the client side:
let formdata = new FormData();
formdata.append("file[name]", imagen.fileName);
formdata.append("file[data]", imagen.data); // this is base64 encoded!
formdata.append("file[type]", imagen.type);
fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
    method: 'PUT',
    headers: {
        'Content-Type': 'multipart/form-data'
    },
    body: formdata
})
On the server side, decode from base64 before writing to the filesystem (Buffer.from handles this natively in Node; atob would give you a string that mangles binary data):
let decoded = Buffer.from(req.body.data, 'base64')
// now this is binary and can be written to the filesystem
From there:
fs.writeFile(newPath, decoded, function (err) {
    imageUrl: newPath;
    user.save(function (err) {
        if (!err) {
            console.log('Updated');
        } else {
            console.log('ERROR: ' + err);
        }
        res.send(user);
    });
});
Note, you don't need the fs.readFile step from your code, because you're decoding the image that was sent as a base64 string in the request body.
There also seem to be some oddities in how you're using that user object. You seem to be passing only a function that handles errors and not any actual data. I don't know which ORM you're using, so it's hard to say how it should work. Maybe something like this?
user.save({imageUrl:uriReturnedByFsWrite}, (err, data)=>{...})
Good luck :)
Make an object, then send that object to the server. The object will consist of name, path, and type, like this:
var imageData = {name: 'image1', path: uri, type: 'image/jpeg'}
The above is one way to send the image data. Another way is to convert it into a BLOB so that the server-side programmer doesn't have to do this task on their end. You can make a BLOB with react-native-fetch-blob.
One more way is to directly upload the images to the Amazon server (S3) and send the link to the backend.
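A minimal server-side sketch of that last approach, assuming the aws-sdk v2 package and a hypothetical bucket name:

const AWS = require('aws-sdk');
const s3 = new AWS.S3();

// Upload an image buffer to S3 and resolve with its public URL, which is
// what gets sent to the backend instead of the file itself.
function uploadImageToS3(buffer, fileName) {
    return s3.upload({
        Bucket: 'my-app-uploads', // hypothetical bucket
        Key: fileName,
        Body: buffer,
        ContentType: 'image/jpeg'
    }).promise().then(result => result.Location);
}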
Function that returns a base64 string:

var RNFetchBlob = require('react-native-fetch-blob').default;

getImageAttachment: function (uri_attachment, mimetype_attachment) {
    return new Promise((RESOLVE, REJECT) => {
        // Fetch attachment
        RNFetchBlob.fetch('GET', config.apiRoot + '/app/' + uri_attachment)
            .then((response) => {
                let base64Str = response.data;
                var imageBase64 = 'data:' + mimetype_attachment + ';base64,' + base64Str;
                // Return base64 image
                RESOLVE(imageBase64);
            })
            .catch((error) => {
                // error handling
                console.log("Error: ", error);
                REJECT(error);
            });
    });
},
Cheers :)

Issues with streams and S3 upload

I am having issues with a zero-byte stream. I am resizing an image and uploading it as a stream to S3. If I pipe the output to the response, it displays correctly.
// Fetch remote file
var request = http.get('mybigfile.jpg', function (response) {
    // Setup IM convert
    var convert = spawn('convert', ['-', '-strip', '-thumbnail', 600, '-']);
    // Pipe file stream to it
    response.pipe(convert.stdin);
    // Pipe result to browser - works fine
    //convert.stdout.pipe(res);
    // S3 requires headers
    var headers = {
        'content-type': response.headers['content-type'],
        'x-amz-acl': 'public-read'
    };
    // Upload to S3
    var aws = aws2js.load('s3', aws.key, aws.secret);
    aws.setBucket(aws.bucket);
    aws.putStream('thumb.jpg', convert.stdout, false, headers, function (err) {
        if (err) {
            return console.error('Error storing:', err.toString());
        } else {
            // No errors - this shows - but file is 0kb
            console.log(path + ' uploaded to S3');
        }
    });
});
I see notes about streams not working with S3 due to content length. I am trying buffers but have had no success with that so far.
Well, no go on streams. I guess I could use pause-stream or multipart to technically achieve this, but otherwise I don't think it's possible. I ended up using a buffer.
...
// Pipe file stream to it
response.pipe(convert.stdin);
// Save to buffer
var bufs = [];
convert.stdout.on('data', function (chunk) {
    bufs.push(chunk);
});
convert.stdout.on('end', function () {
    var buffer = Buffer.concat(bufs);
    // S3 requires headers
    ...
    aws.putBuffer(path, buffer, false, headers, function (err) {
        ...
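For reference, here is the same buffer-collection pattern as a self-contained helper (the stream source and upload call are placeholders; only the collect-then-concat logic comes from the answer above):

// Collect a readable stream into a single Buffer, then hand it to a
// callback - useful when the consumer (like S3 here) needs the full
// content length up front.
function streamToBuffer(stream, callback) {
    var bufs = [];
    stream.on('data', function (chunk) {
        bufs.push(chunk);
    });
    stream.on('error', callback);
    stream.on('end', function () {
        callback(null, Buffer.concat(bufs));
    });
}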
