Reduce size of image when converted to Base64 - javascript

I am using the FileReader in JavaScript. I need to POST my image to a Web API, convert it into a byte array, and save it on the server. That part works fine. My problem is that the base64 string increases the size of the image: if I upload a 30 KB image, it is stored as 389 KB on the server. How can I save it at the same size, or reduce the size of the image? I need help.
// File Reader
function OnFileEditImageEntry(file) {
    var reader = new FileReader();
    reader.onloadend = function (evt) {
        var ImageBase64 = evt.target.result;
        return ImageBase64;
    };
    reader.readAsDataURL(file);
}
// WEB API //
public IHttpActionResult UpdateUserDetails(ImageModel model)
{
    try
    {
        if (model.ImageBase64 != "")
        {
            string ftpurl = "ftp://xxx.xxxxx.xxxx/";
            var username = "xxx";
            var password = "xxxxx";
            string UploadDirectory = "xxxx/xx";
            string FileName = model.ImageFileName;
            string uploadUrl = String.Format("{0}{1}/{2}", ftpurl, UploadDirectory, FileName);

            FtpWebRequest req = (FtpWebRequest)FtpWebRequest.Create(uploadUrl);
            req.Proxy = null;
            req.Method = WebRequestMethods.Ftp.UploadFile;
            req.Credentials = new NetworkCredential(username, password);
            req.EnableSsl = false;
            req.UseBinary = true;
            req.UsePassive = true;

            // Decode the base64 payload back into raw bytes before uploading
            byte[] data = Convert.FromBase64String(model.ImageBase64);
            req.ContentLength = data.Length;
            using (Stream stream = req.GetRequestStream())
            {
                stream.Write(data, 0, data.Length);
            }
        }
        return Ok();
    }
    catch (Exception ex)
    {
        return InternalServerError(ex);
    }
}

Send the raw binary instead of increasing the size by ~33% with base64/FileReader.

With fetch:

// sends the raw binary
fetch('http://example.com/upload', { method: 'post', body: file })

// or append the blob/file to a FormData and send that
var fd = new FormData()
fd.append('file', file, file.name)
fetch('http://example.com/upload', { method: 'post', body: fd })

With XHR:

// xhr = new ...
// xhr.open(...)
xhr.send(file) // or
xhr.send(fd)   // send the FormData

When uploading files, try to avoid sending JSON, as many developers tend to get this wrong. Binary data inside JSON is bad practice (and larger), e.g.:

$.post(url, {
    name: '',
    data: base64
})

Use FormData#append as much as possible, or if you really want to:

fd.append('json', json)
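For example, a minimal end-to-end sketch of the raw-binary approach. The input element id and the field name are illustrative, not from the question, and it assumes the Web API side is changed to read multipart form data instead of a base64 JSON field:

// Upload the selected file as multipart/form-data instead of a base64 string
var fileInput = document.getElementById('fileInput'); // hypothetical file input
fileInput.addEventListener('change', function () {
    var file = fileInput.files[0];
    if (!file) return;
    var fd = new FormData();
    fd.append('image', file, file.name); // the original bytes, no base64 inflation
    fetch('/api/UpdateUserDetails', { method: 'POST', body: fd })
        .then(function (res) { console.log('upload status', res.status); })
        .catch(function (err) { console.error(err); });
});

With this route a 30 KB image arrives on the server as 30 KB, since there is no base64 encoding or decoding step.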

Related

convert base64 image to jpeg

Struggling to convert a base64 image captured using a webcam into a jpeg for upload.
The following capture/display code works (note that I am using webcam.min.js, which returns base64, and not webcam.js, which returns a jpeg but relies on Flash):
function take_snapshot() {
    Webcam.snap( function(data_uri) {
        // display results in page
        document.getElementById('upload_results').innerHTML =
            '<img id="imageprev" src="' + data_uri + '"/>';
    } );
}
I have tried the following, which may or may not be converting the base64 image to a blob:
function saveSnap(){
    var base64image = document.getElementById("imageprev").src;
    alert(base64image)
    // convert base64 to raw binary data held in a string
    var byteString = atob(base64image.split(',')[1]);
    // separate out the mime component
    var mimeString = base64image.split(',')[0].split(':')[1].split(';')[0];
    // write the bytes of the string to an ArrayBuffer
    var ab = new ArrayBuffer(byteString.length);
    var dw = new DataView(ab);
    for(var i = 0; i < byteString.length; i++) {
        dw.setUint8(i, byteString.charCodeAt(i));
        alert("arrived here");
        // write the ArrayBuffer to a blob, and you're done
        return new Blob([ab], {type: mimeString});
    }
}
And this doesn't do anything except halt the JSP:
let image = new Image();
image.src = base64image;
document.body.appendChild(image);
How do I get / see / extract the actual jpeg file so I can then upload it (it must be something like number.jpeg)?
JDK 6 / JavaScript (no PHP please).
Any thoughts appreciated.
Regards
Ralph
Create an image object and put the base64 as its source.

let image = new Image();
image.src = 'data:image/png;base64,iVBORw0K...';
document.body.appendChild(image);

var aFileParts = [image];
var oMyBlob = new Blob(aFileParts, {type : 'image/png'});
// window.open(URL.createObjectURL(oMyBlob));

var fd = new FormData();
fd.append('data', oMyBlob);
$.ajax({
    type: 'POST',
    url: '/upload.php',
    data: fd,
    // required when posting FormData with jQuery, otherwise it tries to serialize it
    processData: false,
    contentType: false,
}).done(function(data) {
    console.log(data);
});
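A side note on the snippet above: passing an Image element to the Blob constructor stores its string serialization ("[object HTMLImageElement]"), not the JPEG bytes. If the goal is a Blob containing the actual image data, a minimal sketch that builds it from the data URI itself (same idea as the base64ToFile helper further down; the field and file names are illustrative):

// Let fetch() decode the data URI and hand back a Blob of the raw image bytes
fetch(image.src)
    .then(function (res) { return res.blob(); })
    .then(function (imageBlob) {
        var fd = new FormData();
        fd.append('data', imageBlob, 'snapshot.jpeg');
        return $.ajax({
            type: 'POST',
            url: '/upload.php',
            data: fd,
            processData: false,
            contentType: false,
        });
    });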
Here are the basics you need to convert to a blob and upload.
const MOCK_DATA_URL = `data:image/jpeg;base64,iVBORw0KGgoAAAANSUhEUgAAABkAAAAZCAYAAADE6YVjAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoV2luZG93cykiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6MEVBMTczNDg3QzA5MTFFNjk3ODM5NjQyRjE2RjA3QTkiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6MEVBMTczNDk3QzA5MTFFNjk3ODM5NjQyRjE2RjA3QTkiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDowRUExNzM0NjdDMDkxMUU2OTc4Mzk2NDJGMTZGMDdBOSIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDowRUExNzM0NzdDMDkxMUU2OTc4Mzk2NDJGMTZGMDdBOSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PjjUmssAAAGASURBVHjatJaxTsMwEIbpIzDA6FaMMPYJkDKzVYU+QFeEGPIKfYU8AETkCYI6wANkZQwIKRNDB1hA0Jrf0rk6WXZ8BvWkb4kv99vn89kDrfVexBSYgVNwDA7AN+jAK3gEd+AlGMGIBFDgFvzouK3JV/lihQTOwLtOtw9wIRG5pJn91Tbgqk9kSk7GViADrTD4HCyZ0NQnomi51sb0fUyCMQEbp2WpU67IjfNjwcYyoUDhjJVcZBjYBy40j4wXgaobWoe8Z6Y80CJBwFpunepIzt2AUgFjtXXshNXjVmMh+K+zzp/CMs0CqeuzrxSRpbOKfdCkiMTS1VBQ41uxMyQR2qbrXiiwYN3ACh1FDmsdK2Eu4J6Tlo31dYVtCY88h5ELZIJJ+IRMzBHfyJINrigNkt5VsRiub9nXICdsYyVd2NcVvA3ScE5t2rb5JuEeyZnAhmLt9NK63vX1O5Pe8XaPSuGq1uTrfUgMEp9EJ+CQvr+BJ/AAKvAcCiAR+bf9CjAAluzmdX4AEIIAAAAASUVORK5CYII=`
function takeSnapshotThenUpload() {
    // get data uri
    let blob = convertToBlob(MOCK_DATA_URL)
    return uploadFile(blob)
}

function convertToBlob(base64image) {
    // convert base64 to raw binary data held in a string
    var byteString = atob(base64image.split(',')[1]);
    // separate out the mime component
    var mimeString = base64image.split(',')[0].split(':')[1].split(';')[0];
    // write the bytes of the string to an ArrayBuffer
    var ab = new ArrayBuffer(byteString.length);
    var dw = new DataView(ab);
    for (var i = 0; i < byteString.length; i++) {
        dw.setUint8(i, byteString.charCodeAt(i));
    }
    // write the ArrayBuffer to a blob, and you're done
    return new Blob([ab], {
        type: mimeString
    });
}

function uploadFile(blob) {
    const formData = new FormData()
    formData.append('cancel.jpeg', blob)
    fetch('/saveImage', {
        method: 'POST',
        body: formData
    })
        .then(response => response.json())
        .then(data => {
            console.log(data)
        })
        .catch(error => {
            console.error(error)
        })
}
<button onclick="takeSnapshotThenUpload()">Take screenshot then upload</button>
Remember to adapt takeSnapshotThenUpload so it receives the data URI from your snapshot handler, something like:

<script src="webcam.js"></script>

<div id="my_camera" style="width:320px; height:240px;"></div>
<div id="my_result"></div>

<script language="JavaScript">
    Webcam.attach( '#my_camera' );

    function take_snapshot() {
        Webcam.snap( function(data_uri) {
            takeSnapshotThenUpload(data_uri)
        } );
    }
</script>
Take Snapshot
Example code for converting base64 to a File (image/jpeg):

async function base64ToFile(base64) {
    // fetch() accepts a data: URL and returns the decoded bytes
    const res = await fetch(base64)
    const buf = await res.arrayBuffer()
    const file = new File([buf], "capture_camera.jpeg", {
        type: 'image/jpeg',
    })
    return file;
}
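For context, a small usage sketch of that helper, assuming the data URI comes from Webcam.snap and the same /saveImage endpoint as the earlier example:

// Hypothetical wiring: convert the snapshot's data URI to a File, then upload it
Webcam.snap(async function (data_uri) {
    const file = await base64ToFile(data_uri);
    const fd = new FormData();
    fd.append('file', file, file.name);
    await fetch('/saveImage', { method: 'POST', body: fd });
});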

Use the base64 preview of the binary data response (zip file) in angularjs

I always get this error when opening the downloaded zip file: C:\Users\me\Downloads\test.zip: Unexpected end of archive.
My current code is:
var blob = new Blob([data], { // data here is the binary content
    type: 'octet/stream',
});
var zipUrl = window.URL.createObjectURL(blob);
var fileName = orderNo;
fileName += '.zip';
downloadFile(null, fileName, null, zipUrl, null); // just creates a hidden anchor tag and triggers the download
The response of the call is binary (I think): Binary Content Here.
But the preview is base64: Base64 Content. And that is the correct one; the way I verify it is by using this fiddle.
You can refer to the screenshot of the network here.
I put the base64 content in this line, var sampleBytes = base64ToArrayBuffer('');, and the downloaded zip opens just fine.
Things I have tried so far:
Adding these headers to the GET call:
var headers = {
    Accept: 'application/octet-stream',
    responseType: 'blob',
};
But I get: Request header field responseType is not allowed by Access-Control-Allow-Headers in preflight response.
We're already using an ajax.service.js in our AngularJS project.
From this answer
var blob = new Blob([yourBinaryDataAsAnArrayOrAsAString], {type: "application/octet-stream"});
var fileName = "myFileName.myExtension";
saveAs(blob, fileName);
There are other things that I have tried that I have not listed. I will edit the question once I find them again.
But here is where I'm currently at: the preview is the correct base64 of the binary file. Is it possible to use that instead of the binary? (If it is, I won't need to find the other methods that I've tested.) I tried some binary-to-base64 converters but they don't work.
So I just went ahead and ditched the ajax.service.js that we have, for this specific call.
I used the xhr snippet from this answer. I just added the headers necessary for our call: tokens and auth stuff.
And I used this code snippet for the conversion.
The code looks like this:
fetchBlob(url, function (blob) {
    // Array buffer to Base64:
    var base64 = btoa(String.fromCharCode.apply(null, new Uint8Array(blob)));
    var blob = new Blob([base64ToArrayBuffer(base64)], {
        type: 'octet/stream',
    });
    var zipUrl = window.URL.createObjectURL(blob);
    var fileName = orderNo;
    fileName += ' Attachments ';
    fileName += moment().format('DD-MMM-YYYY');
    fileName += '.zip';
    downloadFile(null, fileName, null, zipUrl, null); // create a hidden anchor tag and trigger download
});

function fetchBlob(uri, callback) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', uri, true);
    xhr.responseType = 'arraybuffer';
    var x = AjaxService.getAuthHeaders();
    xhr.setRequestHeader('auth_stuff', x['auth_stuff']);
    xhr.setRequestHeader('token_stuff', x['token_stuff']);
    xhr.setRequestHeader('Accept', 'application/octet-stream');
    xhr.onload = function (e) {
        if (this.status == 200) {
            var blob = this.response;
            if (callback) {
                callback(blob);
            }
        }
    };
    return xhr.send();
}

function base64ToArrayBuffer(base64) {
    var binaryString = window.atob(base64);
    var binaryLen = binaryString.length;
    var bytes = new Uint8Array(binaryLen);
    for (var i = 0; i < binaryLen; i++) {
        var ascii = binaryString.charCodeAt(i);
        bytes[i] = ascii;
    }
    return bytes;
}
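As an aside, the CORS error quoted in the question comes from putting responseType into the request headers; it is a property of the XMLHttpRequest object, not an HTTP header. A minimal sketch of the more direct route (no base64 round trip), assuming the same auth headers and downloadFile helper as above:

function fetchZip(uri, fileName) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', uri, true);
    // responseType is set on the xhr object itself, never sent as a header
    xhr.responseType = 'blob';
    var x = AjaxService.getAuthHeaders();
    xhr.setRequestHeader('auth_stuff', x['auth_stuff']);
    xhr.setRequestHeader('token_stuff', x['token_stuff']);
    xhr.onload = function () {
        if (this.status === 200) {
            // this.response is already a Blob; hand it straight to createObjectURL
            var zipUrl = window.URL.createObjectURL(this.response);
            downloadFile(null, fileName, null, zipUrl, null);
        }
    };
    xhr.send();
}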

MemoryStream to HttpResponseMessage

I have generated a PDF on the server, and need to return it as a response to my web client so that I get a 'Save As' dialog.
The PDF is generated and saved to a MemoryStream, which is then returned to my method, which builds the HttpResponseMessage.
This is the method:
[Route("GeneratePdf"), HttpPost]
public HttpResponseMessage GeneratePdf(PlateTemplateExtendedDto data)
{
var doc = GeneratePdf(DataForThePdf);
//using (var file = File.OpenWrite("c:\\temp\\test.pdf"))
// doc.CopyTo(file); // no need for manual stream copy and buffers
HttpResponseMessage response = Request.CreateResponse(HttpStatusCode.OK);
byte[] buffer = new byte[0];
//get buffer
buffer = doc.GetBuffer();
//content length for use in header
var contentLength = buffer.Length;
response.Headers.AcceptRanges.Add("bytes");
response.StatusCode = HttpStatusCode.OK;
response.Content = new StreamContent(doc);
response.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("render");
response.Content.Headers.ContentDisposition.FileName = "yes.pdf";
response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/pdf");
response.Content.Headers.ContentLength = doc.Length;
return response;
}
However, the document renders as a blank file: it has a file size and the properties of the document I created (the PDF information in File Properties is correct, as are the page width and height), but the pages display as blank.
If I uncomment the commented-out code to save locally, the file is perfect; its size is 228,889 bytes. However, when I let it go to my web page and save it there, it's 405,153 bytes and the filename is 'undefined'.
If I set a breakpoint, I see these results:
In the front-end script, I handle the downloaded object like this:
$.post("/api/PlateTemplate/GeneratePdf", data).done(function (data, status, headers) {
// headers = headers();
var filename = headers['x-filename'];
var contentType = headers['content-type'];
//Create a url to the blob
var blob = new Blob([data], { type: contentType });
var url = window.URL.createObjectURL(blob);
var linkElement = document.createElement('a');
linkElement.setAttribute('href', url);
linkElement.setAttribute("download", filename);
//Force a download
var clickEvent = new MouseEvent("click", {
"view": window,
"bubbles": true,
"cancelable": false
});
linkElement.dispatchEvent(clickEvent);
});
I'm unsure where the file is being corrupted. What am I doing wrong?
Edit: Using the following code as suggested:
$.post("/api/PlateTemplate/GeneratePdf", data).done(function (data, status, headers) {
alert(data.length);
var xhr = new XMLHttpRequest();
$("#pdfviewer").attr("src", URL.createObjectURL(new Blob([data], {
type: "application/pdf"
})))
.Net code:
var doc = GeneratePdf(pdfParams);
HttpResponseMessage response = Request.CreateResponse(HttpStatusCode.OK);
byte[] buffer = new byte[0];
//get buffer
buffer = doc.ToArray();
//content length for use in header
var contentLength = buffer.Length;
response.Headers.AcceptRanges.Add("bytes");
response.StatusCode = HttpStatusCode.OK;
response.Content = new StreamContent(doc);
response.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("render");
response.Content.Headers.ContentDisposition.FileName = "yes.pdf";
response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/pdf");
response.Content.Headers.ContentLength = doc.Length;
return response;
It seems I am losing data.
The alert shows data.length in my JavaScript after I get the data back from the call.
The file properties show the original PDF file info.
The file sent from the API is 227,564 bytes, which matches the byte size on disk if I save it, so it SEEMS the sending is OK. But on the JavaScript side, when I read in the file it's 424,946 bytes when I do var file = new Blob([data], { type: 'application/pdf' }); (where data is the response from the server).
The ContentLength setting looks somewhat suspicious (not consistent):
//content length for use in header
var contentLength = buffer.Length;
response.Content.Headers.ContentLength = doc.Length;
I 'fixed' this by returning a base64-encoded string from the .NET controller as the JavaScript Web API call result, and then letting the browser convert it back into binary by specifying the type ('application/pdf').
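For illustration, a minimal sketch of that workaround on the JavaScript side, assuming the controller is changed to return the PDF as a plain base64 string (the endpoint comes from the question; the decoding is the standard atob/Uint8Array pattern used elsewhere on this page):

$.post("/api/PlateTemplate/GeneratePdf", data).done(function (base64Pdf) {
    // Decode the base64 payload back into raw bytes
    var byteString = atob(base64Pdf);
    var bytes = new Uint8Array(byteString.length);
    for (var i = 0; i < byteString.length; i++) {
        bytes[i] = byteString.charCodeAt(i);
    }
    // Let the browser treat the bytes as a PDF and trigger the download
    var blob = new Blob([bytes], { type: 'application/pdf' });
    var url = window.URL.createObjectURL(blob);
    var link = document.createElement('a');
    link.href = url;
    link.download = 'yes.pdf';
    link.dispatchEvent(new MouseEvent('click'));
});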

Javascript formdata: encrypt files before appending

I need to modify existing frontend (angular) code that involves uploading files to a server. Now the files need to be encrypted before being uploaded.
The current approach uses FormData to append a number of files and send them in a single request as shown below:
function uploadFiles(wrappers){
    var data = new FormData();
    // Add each file
    for(var i = 0; i < wrappers.length; i++){
        var wrapper = wrappers[i];
        var file = wrapper.file;
        data.append('file_' + i, file);
    }
    $http.post(uri, data, requestCfg).then(
        /*...*
I have been using Forge in other projects, but never in this sort of context and don't really see how to encrypt files on the fly and still append them as FormData contents.
Forge provides an easy API:
var key = forge.random.getBytesSync(16);
var iv = forge.random.getBytesSync(8);
// encrypt some bytes
var cipher = forge.rc2.createEncryptionCipher(key);
cipher.start(iv);
cipher.update(forge.util.createBuffer(someBytes));
cipher.finish();
var encrypted = cipher.output;
The backend receives files using Formidable and all the file handling is already wired up. I would thus like to stick to the existing front-end logic and simply insert the encryption logic; it's not the entire FormData that must be encrypted, just each file. I haven't found a good lead yet on how to approach this.
Suggestions are very welcome!
Ok, found a solution and added the decrypt code as well. This adds a layer of async code.
function appendFile(aFile, idx){
    // Encrypt if a key was provided for this protocol test
    if(!key){
        data.append('dicomfile_' + idx, file);
        appendedCount++;
        onFileAppended();
    }
    else{
        var reader = new FileReader();
        reader.onload = function(){
            // 1. Read bytes
            var arrayBuffer = reader.result;
            var bytes = new Uint8Array(arrayBuffer); // byte array aka uint8

            // 2. Encrypt
            var cipher = forge.cipher.createCipher('AES-CBC', key);
            cipher.start({iv: iv});
            cipher.update(forge.util.createBuffer(bytes));
            cipher.finish();

            // 3. To blob (file extends blob)
            var encryptedByteCharacters = cipher.output.getBytes(); // encryptedByteCharacters is similar to an atob(b64) output
            // var asB64 = forge.util.encode64(encryptedBytes);
            // var encryptedByteCharacters = atob(asB64);

            // Convert to Blob object
            var blob = byteCharsToBlob(encryptedByteCharacters, "application/octet-stream", 512);

            // 4. Append blob
            data.append('dicomfile_' + idx, blob, file.name);

            // Decrypt for the sake of testing
            if(true){
                var fileReader = new FileReader();
                fileReader.onload = function() {
                    arrayBuffer = this.result;
                    var bytez = new Uint8Array(arrayBuffer);
                    var decipher = forge.cipher.createDecipher('AES-CBC', key);
                    decipher.start({iv: iv});
                    decipher.update(forge.util.createBuffer(bytez));
                    decipher.finish();
                    var decryptedByteCharacters = decipher.output.getBytes();
                    var truz = bytes === decryptedByteCharacters;
                    var blob = byteCharsToBlob(decryptedByteCharacters, "application/octet-stream", 512);
                    data.append('decrypted_' + idx, blob, file.name + '.decrypted');
                    appendedCount++;
                    onFileAppended();
                };
                fileReader.readAsArrayBuffer(blob);
            }
            else{
                // z. Resume processing
                appendedCount++;
                onFileAppended();
            }
        };
        // Read file
        reader.readAsArrayBuffer(aFile);
    }
}
function onFileAppended(){
    // Only proceed when all files were appended and optionally encrypted (async)
    if(appendedCount !== wrappers.length) return;
    /* resume processing, upload or do whatever */
}
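The snippet above relies on a byteCharsToBlob helper that is not shown. A plausible sketch, assuming it follows the usual byte-characters-to-Blob pattern; the name and signature are taken from the calls above, the body is an assumption:

// Assumed helper: turns a binary string (one char per byte) into a Blob, slicing
// the input so the intermediate arrays stay small
function byteCharsToBlob(byteCharacters, contentType, sliceSize) {
    var byteArrays = [];
    for (var offset = 0; offset < byteCharacters.length; offset += sliceSize) {
        var slice = byteCharacters.slice(offset, offset + sliceSize);
        var byteNumbers = new Array(slice.length);
        for (var i = 0; i < slice.length; i++) {
            byteNumbers[i] = slice.charCodeAt(i);
        }
        byteArrays.push(new Uint8Array(byteNumbers));
    }
    return new Blob(byteArrays, { type: contentType });
}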

URL encoding "data:image/jpg; base64" image

How can I encode the data:image/jpeg;base64 data URL so that it is transmitted correctly through an AJAX POST? I currently have the following code doing so: xhr.open('POST', 'http://url-sent-to/image/' + saveImage + '&imageid=' + imageid.value, true);
However, the URL http://url-sent-to/image/data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD…RRQAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAFFFFAH/2Q==&imageid=testimagedata does not look like it will be correct, especially since it has = in it.
$(function () {
    var fileInput = document.getElementById("file")
        , renderButton = $("#renderButton")
        , imgly = new ImglyKit({
            container: "#container",
            ratio: 1 / 1
        });

    // As soon as the user selects a file...
    fileInput.addEventListener("change", function (event) {
        var file;
        var fileToBlob = event.target.files[0];
        var blob = new Blob([fileToBlob], {"type": fileToBlob.type});
        // do stuff with blob
        console.log(blob);

        // Find the selected file
        if (event.target.files) {
            file = event.target.files[0];
        } else {
            file = event.target.value;
        }

        // Use FileReader to turn the selected
        // file into a data url. ImglyKit needs
        // a data url or an image
        var reader = new FileReader();
        reader.onload = (function (file) {
            return function (e) {
                data = e.target.result;
                // Run ImglyKit with the selected file
                try {
                    imgly.run(data);
                } catch (e) {
                    if (e.name == "NoSupportError") {
                        alert("Your browser does not support canvas.");
                    } else if (e.name == "InvalidError") {
                        alert("The given file is not an image");
                    }
                }
            };
        })(file);
        reader.readAsDataURL(file);
    });

    // As soon as the user clicks the render button...
    // Listen for "Render final image" click
    renderButton.click(function (event) {
        var dataUrl;
        imgly.renderToDataURL("image/jpeg", { size: "1200" }, function (err, dataUrl) {
            // `dataUrl` now contains a resized rendered image with
            // a width of 300 pixels while keeping the ratio

            // Convert DataURL to Blob to send over Ajax
            function dataURItoBlob(dataUrl) {
                // convert base64 to raw binary data held in a string
                // doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
                var byteString = atob(dataUrl.split(',')[1]);
                // separate out the mime component
                var mimeString = dataUrl.split(',')[0].split(':')[1].split(';')[0];
                // write the bytes of the string to an ArrayBuffer
                var ab = new ArrayBuffer(byteString.length);
                var ia = new Uint8Array(ab);
                for (var i = 0; i < byteString.length; i++) {
                    ia[i] = byteString.charCodeAt(i);
                }
                // write the ArrayBuffer to a blob, and you're done
                //var bb = new BlobBuilder();
                //bb.append(ab);
                //return bb.getBlob(mimeString);
            }

            var blob = dataURItoBlob(dataUrl);
            var fd = new FormData(document.forms[0]);
            var xhr = new XMLHttpRequest();
            var saveImage = dataUrl;
            //console.log(saveImage);
            fd.append("myFile", blob);
            xhr.open('POST', 'http://url-sent-to/image/' + saveImage + '&imageid=' + imageid.value, true);
            xhr.send(fd);
        });
    });
});
I have a fiddle set up as an example of what I'm doing. Essentially, the user will select an image, enter a description, and hit render. When you check the JavaScript console, you'll see a Blob is created, and the POST message at the bottom: http://jsfiddle.net/mattography/Lgduvce1/2/
You're looking for encodeURI(), which does exactly that.
Note that you're missing a ? to start your query string.
Also note that making URLs that long is a bad idea; you should send the data in the body of the POST request instead.
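For illustration, a sketch of both suggestions, reusing the saveImage, imageid, fd, and xhr variables from the question. Note that for a value placed inside a query string, encodeURIComponent is the stricter choice, since encodeURI leaves characters such as + and = unescaped; the image parameter name is illustrative:

// Option 1: encode the data URL before putting it in the query string (still very long)
xhr.open('POST', 'http://url-sent-to/image/?image=' + encodeURIComponent(saveImage) +
    '&imageid=' + encodeURIComponent(imageid.value), true);
xhr.send(fd);

// Option 2 (preferred): keep the URL short and send the data in the POST body
fd.append('image', saveImage);
fd.append('imageid', imageid.value);
xhr.open('POST', 'http://url-sent-to/image/', true);
xhr.send(fd);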
