Losing quality in image when sending to back end - javascript

The user uploads a photo which is then edited and cropped using Croppie. The edited and cropped version is the one that needs to be saved in the back end.
The current code takes the edited image and saves it in the database, but when the image is fetched again it is displayed with its colour lost / washed out (shown below).
Image being sent:
Image fetched:
And here is the code:
<input id="uploadAvatar" type="file" name="file" accept="image/*" />

$('#uploadAvatar').on('change', function () {
    var reader = new FileReader();
    reader.onload = function (e) {
        var blob = dataURItoBlob(e.target.result);
        var fd = new FormData();
        fd.append("file", blob);
        // SERVICE HANDLES THE HTTP REQUEST
        httpUploadAvatarServ('../p3sweb/FileUploadServlet', fd, function (callback) {
            console.log(callback);
        });
    };
    // read the selected file as a data URL (triggers onload above)
    reader.readAsDataURL(this.files[0]);
});
function dataURItoBlob(dataURI) {
    var binary = atob(dataURI.split(',')[1]);
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    // note: the MIME type is hard-coded here; it should match the actual image type
    return new Blob([new Uint8Array(array)], { type: 'image/jpeg' });
}
Question
Why is it that the image is losing quality/colour?

I think your issue may be in the dataURItoBlob function. Here's how I go about getting the base64 data with Croppie before sending it to the server. I tried to abridge my code for simplicity... hopefully I didn't make it worse:
function PlaceLargeImg(input, imgId, zindex) {
    var $imgel = $(imgId);
    var reader = new FileReader();

    reader.onload = function (e) {
        // Sets preview image src to the value of file input.
        $imgel.attr('src', e.target.result);

        // Bind croppie
        cropperlg.bind({
            url: $imgel.attr('src')
        });

        // On click method which crops and sets results to html input.
        $('.btn-crop-lg').on('click', () => {
            cropperlg.result({
                type: 'canvas',
                size: { width: 1000, height: 400 }
            }).then(function (result) {
                $('#base64lg').val(result); // Resulting cropped img base64 that gets sent to server.
                $('#uploadLg').submit();
            });
        });
    };

    reader.readAsDataURL(input.files[0]);
}
EDIT: If your image looks good in a client-side preview AFTER parsing through dataURItoBlob, then the issue is on the server. In this case, you'll need to post your server-side code.
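For that client-side check, here is a minimal sketch (it would sit inside the reader.onload handler from the question; "previewImg" is a hypothetical <img id="previewImg"> added only for debugging):

// Debugging sketch: preview the converted blob locally before uploading.
var blob = dataURItoBlob(e.target.result);
var objectUrl = URL.createObjectURL(blob);
var previewImg = document.getElementById('previewImg'); // hypothetical debug <img>
previewImg.onload = function () {
    URL.revokeObjectURL(objectUrl); // release the temporary URL once rendered
};
previewImg.src = objectUrl;
// If this preview already shows the colour loss, the problem is in the conversion;
// if it looks fine, the problem is server-side.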

Related

Storing the canvas as an image with a .png extension, but in a variable

I have two webpages: on the first, a user draws on the canvas and can download the drawing as an image; on the second, the image can be uploaded to be processed further. Is there a way to remove this downloading and re-uploading and do the whole process in a single step?
The second page requires an image with a PNG extension.
I tried using canvas.toDataURL but it's not working.
Update
I have added the functionality of the second webpage to the first.
Can I directly save the canvas in the same format as when an image is uploaded?
This is the function that handles the file upload on the second webpage:
function handleFileSelect(evt) {
    const file = evt.target.files[0];

    // do nothing if no file is selected
    if (file == null) {
        postData.data = '';
        postData.type = '';
        return;
    }

    // only allow images
    if (!file.type.match('image.*')) {
        alert('Unsupported Image File');
        resetForm();
        return;
    }

    const reader = new FileReader();
    reader.onload = (event) => {
        postData.data = event.target.result;
        postData.type = file.type;
    };

    // Read in the image file as a binary string.
    reader.readAsBinaryString(file);
}
How can I convert the canvas into the equivalent of "event.target.result"?
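One possible approach (a minimal sketch, assuming the drawing canvas and the postData object from the handler above are available on the same page) is to read the canvas directly instead of going through FileReader:

// Minimal sketch: fill postData straight from the canvas, skipping the
// download/re-upload round-trip. "drawingCanvas" is an illustrative reference
// to the canvas element the user draws on.
function canvasToPostData(drawingCanvas) {
    // toDataURL returns a base64 data URL such as "data:image/png;base64,...".
    postData.data = drawingCanvas.toDataURL('image/png');
    postData.type = 'image/png';
    // Note: the original handler uses readAsBinaryString, so if the server expects
    // a raw binary string rather than a data URL, the base64 part can be decoded
    // with atob(postData.data.split(',')[1]).
}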

Cropping a profile picture in Node JS

I want the user on my site to be able to crop an image that they will use as a profile picture. I then want to store this image in an uploads folder on my server.
I've done this using php and the JCrop plugin, but I've recently started to change the framework of my site to use Node JS.
This is how I allowed the user to crop an image before using JCrop:
$("#previewSub").Jcrop({
onChange: showPreview,
onSelect: showPreview,
aspectRatio: 1,
setSelect: [0,imgwidth+180,0,0],
minSize: [90,90],
addClass: 'jcrop-light'
}, function () {
JcropAPI = this;
});
and I would use php to store it in a folder:
<?php
$targ_w = $targ_h = 300;
$jpeg_quality = 90;

$img_r = imagecreatefromjpeg($_FILES['afile']['tmp_name']);
$dst_r = ImageCreateTrueColor($targ_w, $targ_h);

imagecopyresampled($dst_r, $img_r, 0, 0, $_POST['x'], $_POST['y'],
    $targ_w, $targ_h, $_POST['w'], $_POST['h']);

header("Content-type: image/jpeg");
imagejpeg($dst_r, 'uploads/sample3.jpg', $jpeg_quality);
?>
Is there an equivalent plugin to JCrop, as shown above, for Node JS? There are probably multiple ones; if so, which ones would you recommend? Any simple examples are appreciated too.
EDIT
Because the question is not getting any answers, perhaps it is possible to keep the JCrop code above and instead change my PHP code to Node JS. If this is possible, could someone show me how to translate my PHP code? What would be the equivalent of the PHP above?
I am very new to Node, so I'm having a difficult time finding the equivalent functions and whatnot.
You could send the raw image (the original user input file) and the cropping parameters returned by JCrop.
Send the image encoded in base64 (a string); when it is received by the server, store it in a Buffer:
var img = Buffer.from(img_string, 'base64');
ImageMagick docs (inline, i.e. working with base64): http://www.imagemagick.org/Usage/files/#inline
The server then has the image in a buffer, together with the cropping parameters.
From there you could use something like :
https://github.com/aheckmann/gm ...or...
https://github.com/rsms/node-imagemagick
to apply the modifications to the buffered image and then store the result in the file system.
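For example, a minimal server-side sketch with the gm module (assuming the image is already in a Buffer as above and the JCrop selection arrives with the request; the variable and parameter names are illustrative):

// Minimal sketch, assuming "img" is the Buffer decoded from base64 above and
// "params" holds the JCrop selection { x, y, w, h } sent by the client.
var gm = require('gm'); // requires GraphicsMagick or ImageMagick installed on the server

function cropAndSave(img, params, savePath, done) {
    gm(img, 'upload.jpg')                              // filename hint tells gm the format
        .crop(params.w, params.h, params.x, params.y)  // crop to the selected region
        .resize(300, 300)                              // match the 300x300 output of the old PHP code
        .write(savePath, function (err) {
            done(err);                                 // err is null on success
        });
}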
You have other options too, like manipulating the image client-side and sending the encoded result of the cropping.
EDIT : First try to read & encode the image when the user uses the input :
$('body').on("change", "input#selectImage", function(){readImage(this);});
function readImage(input) {
if ( input.files && input.files[0] ) {
var FR = new FileReader();
FR.onload = function(e) {
console.log(e.target.result);
// Display in <img> using the b64 string as src
$('#uploadPreview').attr( "src", e.target.result );
// Send the encoded image to the server
socket.emit('upload_pic', e.target.result);
};
FR.readAsDataURL( input.files[0] );
}
}
Then, when it is received at the server, use the Buffer as mentioned above:
var fs = require('fs');

var matches = img.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/),
    response = {};

if (!matches || matches.length !== 3) {
    /* invalid string! */
} else {
    var filename = 'filename';
    var file_ext = '.png';

    response.type = matches[1];
    response.data = Buffer.from(matches[2], 'base64');

    var saveFileAs = 'storage-directory/' + filename + file_ext;

    // remove any existing file before writing the new one
    fs.unlink(saveFileAs, function () {
        fs.writeFile(saveFileAs, response.data, function (err) {
            if (err) { /* error saving image */ }
        });
    });
}
I would personally send the encoded image once it has been edited client-side.
The server simply validates and saves the file; let the client do the extra work.
As promised, here is how I used darkroom.js with one of my Express projects.
//Location to store the image
var multerUploads = multer({ dest: './uploads/' });
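For completeness, a sketch of the setup this snippet assumes (the requires are not shown in the original answer; module names are the usual ones for an Express project):

// Assumed setup for the snippets below (not part of the original answer).
var express = require('express');
var multer = require('multer');
var fs = require('fs');

var router = express.Router();
// Note: with multer 1.x the route middleware is usually created as
// multer({ dest: './uploads/' }).single('photograph') rather than the bare instance.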
I upload the image first and then allow the user to crop it. This is because I would like to keep the original image; hence the upload Jade below:
form(method='post', action='/user/image/submit', enctype='multipart/form-data')
    input(type='hidden', name='refNumber', value='#{refNumber}')
    input(type='file', name='photograph', accept='image/jpeg,image/png')
    br
    input(type='submit', value='Upload image', data-role='button')
Here is the form I use to crop the image
//- figure needed for darkroom.js
figure(class='image-container', id='imageContainer')
    //specify source from where it should load the uploaded image
    img(class='targetImg img-responsive', src='/user/image/view/#{photoName}', id='target')

form(name='croppedImageForm', method='post', enctype='multipart/form-data', id='croppedImageForm')
    input(type='hidden', name='refNumber', id='refNumber', value='#{refNumber}')
    input(type='hidden', id='encodedImageValue', name='croppedImage')
    br
    input(type='submit', value='Upload Cropped Image', id='submitCroppedImage', data-role='button')
The darkroom.js is attached to the figure element using this piece of javascript.
new Darkroom('#target', {
    // Canvas initialization size
    minWidth: 160,
    minHeight: 160,
    maxWidth: 900,
    maxHeight: 900
});
Once you follow STEP 1, STEP 2 and finally STEP 3, the base64 value of the cropped region is stored under the figure element; see the console log shown in the screenshot below:
I then have a piece of JavaScript that is triggered when Upload Cropped Image is clicked; it copies the base64 value of the img inside the figure into the input element with id encodedImageValue and then submits it to the server. The function is as follows:
$("#submitCroppedImage").click(function() {
var img = $('#imageContainer img');
var imgSrc = img.attr('src');
if(imgSrc !== undefined && imgSrc.indexOf('base64') > 0) {
$('#encodedImageValue').val(img.attr('src'));
$.ajax({
type: "POST",
url: "/user/image/cropped/submit",
data: $('#croppedImageForm').serialize(),
success: function(res, status, xhr) {
alert('The CROPPED image is UPLOADED.');
},
error: function(xhr, err) {
console.log('There was some error.');
}
});
} else {
alert('Please follow the steps correctly.');
}
});
Here is a screenshot of the POST request with the base64 field as its body:
The post request is mapped to the following route handler in Express app:
router.post('/user/image/cropped/submit',
    multerUploads,
    function (req, res) {
        var photoName = null;
        var refNumber = req.body.refNumber;
        // strip the data-URL prefix so only the raw base64 remains
        var base64Data = req.body.croppedImage.replace(/^data:image\/png;base64,/, "");

        fs.writeFile("./uploads/cropped-" + 'profile_image.png', base64Data, 'base64',
            function (err) {
                if (err) {
                    return res.status(500).send();
                }
                logger.info("Saving image to disk ...");
                res.status(200).send();
            });
    });
I have the following .js files relating to awesome Fabric.js and darkroom.js
script(src='/static/js/jquery.min.js')
script(src='/static/js/bootstrap.min.js')
//get the js files from darkroom.js github
script(src='/static/js/fabric.js')
script(src='/static/js/darkroom.js')
script(src='/static/js/plugins/darkroom.crop.js')
script(src='/static/js/plugins/darkroom.history.js')
script(src='/static/js/plugins/darkroom.save.js')
link(href='/static/css/darkroom.min.css', rel='stylesheet')
link(href='https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.3.2/css/bootstrap.css', rel='stylesheet')
//get this from darkroom.js github
link(href='/static/css/page.css', rel='stylesheet')
Lastly, also copy the SVG icons for selecting, cropping, saving, etc. (from the darkroom.js GitHub page).

Uploading an image from computer to Fabric.JS Canvas

Are there any Fabric.JS Wizards out there?
I've done my fair share of research and I can't seem to find much of an explanation on how to add an image to the Fabric.JS canvas.
User Journey:
a) User uploads an image from an input file type button.
b) As soon as they have selected an image from their computer I want to place it into the users canvas.
So far I've got as far as storing the image on the scope; this is my code below:
scope.setFile = function (element) {
    scope.currentFile = element.files[0];

    var reader = new FileReader();
    reader.onload = function (event) {
        // This stores the image to scope
        scope.imageSource = event.target.result;
        scope.$apply();
    };

    // when the file is read it triggers the onload event above.
    reader.readAsDataURL(element.files[0]);
};
My HTML/Angular:
<label class="app-file-input button">
<i class="icon-enter"></i>
<input type="file"
id="trigger"
onchange="angular.element(this).scope().setFile(this)"
accept="image/*">
</label>
If you haven't guessed yet I am using a MEAN stack. Mongoose, Express, Angular and Node.
The scope property imageSource is what the image is stored in. I've read this SO question,
and it talks about pushing my result into an Image object and then passing it to the fabric.Image object. Has anyone done a similar thing that they could help me with?
Thanks in advance
**** UPDATE ****
Directive defines the canvas variable:
var canvas = new fabric.Canvas(attrs.id, {
isDrawingMode: true
});
Upload an image from the computer with Fabric.js:
var canvas = new fabric.Canvas('canvas');

document.getElementById('file').addEventListener("change", function (e) {
    var file = e.target.files[0];
    var reader = new FileReader();

    reader.onload = function (f) {
        var data = f.target.result;
        fabric.Image.fromURL(data, function (img) {
            var oImg = img.set({ left: 0, top: 0, angle: 0, width: 100, height: 100 }).scale(0.9);
            canvas.add(oImg).renderAll();
            var a = canvas.setActiveObject(oImg);
            var dataURL = canvas.toDataURL({ format: 'png', quality: 0.8 });
        });
    };

    reader.readAsDataURL(file);
});
canvas {
    border: 1px solid black;
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<script src="https://rawgit.com/kangax/fabric.js/master/dist/fabric.min.js"></script>
<input type="file" id="file"><br />
<canvas id="canvas" width="450" height="450"></canvas>
Once you have a data URL, you can do either:
reader.onload = function (event) {
    // This stores the image to scope
    fabric.Image.fromURL(event.target.result, function (img) {
        canvas.add(img);
    });
    scope.$apply();
};
Or you can put an onload event on your image tag where you do:
var img = new fabric.Image(your_img_element);
canvas.add(img);
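A minimal sketch of that second approach (the element id and handler are illustrative, not from the original answer):

// Illustrative <img> onload approach: "previewImg" is an <img> whose src was
// set to the FileReader result (a data URL).
var previewImg = document.getElementById('previewImg');
previewImg.onload = function () {
    var img = new fabric.Image(previewImg, { left: 0, top: 0 });
    canvas.add(img);
    canvas.renderAll();
};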
This is for drag and drop from desktop using the dataTransfer interface.
canvas.on('drop', function (event) {
    // prevent the file from opening in a new tab
    event.e.stopPropagation();
    event.e.stopImmediatePropagation();
    event.e.preventDefault();

    // Use the DataTransfer interface to access the file(s)
    if (event.e.dataTransfer.files.length > 0) {
        var files = event.e.dataTransfer.files;
        for (var i = 0, f; f = files[i]; i++) {
            // Only process image files.
            if (f.type.match('image.*')) {
                // Read the File objects in this FileList.
                var reader = new FileReader();
                // listener for the onload event
                reader.onload = function (evt) {
                    // put image on canvas
                    fabric.Image.fromURL(evt.target.result, function (obj) {
                        obj.scaleToHeight(canvas.height);
                        obj.set('strokeWidth', 0);
                        canvas.add(obj);
                    });
                };
                // Read in the image file as a data URL.
                reader.readAsDataURL(f);
            }
        }
    }
});
Resources
https://developer.mozilla.org/en-US/docs/Web/API/HTML_Drag_and_Drop_API/File_drag_and_drop

Upload HTML5 canvas as a blob

I have a canvas, which I can paint to the DOM without a problem as well as save for the user locally using:
storCanvas.toBlob(function(blob) { saveAs(blob, name + ".png");});
(note: I've included canvas-toBlob.js for cross platform support, from http://eligrey.com/blog/post/saving-generated-files-on-the-client-side)
Now, what I would like to do is send the same canvas element to the server. I thought I could do something like:
storCanvas.toBlob(function(blob) {upload(blob);});
where upload(blob); is:
function upload(blobOrFile) {
    var xhr = new XMLHttpRequest();
    xhr.open('POST', 'upload_file.php', true);
    xhr.onload = function (e) { /* uploaded */ };

    // Listen to the upload progress.
    var progressBar = document.querySelector('progress');
    xhr.upload.onprogress = function (e) {
        if (e.lengthComputable) {
            progressBar.value = (e.loaded / e.total) * 100;
            progressBar.textContent = progressBar.value;
        }
    };

    xhr.send(blobOrFile);
}
(cribbed from: http://www.html5rocks.com/en/tutorials/file/xhr2/)
Now, I thought this would work, but my PHP page (which I can POST to successfully using a standard HTML form) reports back (via Firebug) that it got an invalid file of 0 kB. I assume the issue is in my conversion to a blob, but I'm not sure of the correct way to go about it...
I came across the same problem when learning about blobs myself. I believe the problem is in submitting the blob itself through the XMLHttpRequest rather than putting it inside a FormData like this:
canvas.toBlob(function (blob) {
    var formData = new FormData(); // this will submit as a "multipart/form-data" request
    formData.append("image_name", blob); // "image_name" is what the server will call the blob
    upload(formData); // upload the "formData", not the "blob"
});
Hope this helps.
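As a side note, FormData.append also accepts an optional third filename argument, which some server-side handlers (PHP's $_FILES, for example) expect; a small sketch:

// Sketch: give the blob an explicit filename when appending it.
canvas.toBlob(function (blob) {
    var formData = new FormData();
    formData.append('image_name', blob, 'canvas.png'); // the filename is illustrative
    upload(formData);
});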
You can use this module for uploading blobs:
blobtools.js (https://github.com/extremeFE/blobtools.js)

// include blobtools.js
blobtools.uploadBlob({ // upload blob
    blob: blob,
    url: uploadUrl,              // upload url
    fileName: 'paste_image.png', // upload file name
    callback: callback           // callback after upload
});
Not sure if you want this, but how about converting the canvas element into an image data URL, sending it to the server, and then writing it out on the server?
Something like:
function canvas2DataUrl(canvasElementId) {
    var canvasElement = document.getElementById(canvasElementId);
    var imgData = canvasElement.toDataURL("image/png");
    return imgData;
}
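A possible usage sketch for that approach (the element id and parameter name are illustrative; the server would then decode the base64 part of the data URL):

// Usage sketch: POST the data URL string to the server.
var imgData = canvas2DataUrl('canvas'); // 'canvas' is an illustrative element id
var xhr = new XMLHttpRequest();
xhr.open('POST', 'upload_file.php', true); // endpoint from the question; adjust as needed
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
xhr.send('imageData=' + encodeURIComponent(imgData));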

Is it possible to load an image into a web page locally?

The idea is to take an image from a user's machine and allow them to display the image in a webpage. I do not want to send the image back to the server.
An upload button will exist. It should just update the page content dynamically.
Can this be done with HTML5 localstorage or anything?
FileReader is capable of doing this. Here's sample code:
<input type="file" id="files" name="files[]" multiple />
<img id="image" />
<script>
function onLoad(evt) {
/* do sth with evt.target.result - it image in base64 format ("data:image/jpeg;base64,....") */
localStorage.setItem('image', evt.target.result);
document.getElementById('image').src = evt.target.result;
};
function handleFileUpload(evt) {
var files = evt.target.files; // FileList object
for (var i = 0; i < files.length; i++) {
var f = files[i];
// Only process image files.
if (!f.type.match('image.*')) {
continue;
}
var reader = new FileReader();
reader.onload = onLoad;
reader.readAsDataURL(f);
}
}
document.getElementById('files').addEventListener('change', handleFileUpload, false);
var image = localStorage.getItem('image');
if (image !== null) {
document.getElementById('image').src = image;
}
</script>
Yeah absolutely, providing you have a cutting edge browser (Chrome or Firefox).
You want to use FileReader and readAsDataURL(). Take a look at the third example in this tutorial:
http://www.html5rocks.com/en/tutorials/file/dndfiles/
