Save captured png as arraybuffer - javascript

I'm trying to save an image to Dropbox and having trouble getting the conversion right. I have an img (captured using this sample) and I want to store it to Dropbox, which accepts an ArrayBuffer (sample here).
This is the code I found that should do the two conversions: first to base64, then into an ArrayBuffer.
function getBase64Image(img) {
  // Create an empty canvas element
  var canvas = document.createElement("canvas");
  canvas.width = img.width;
  canvas.height = img.height;
  // Copy the image contents to the canvas
  var ctx = canvas.getContext("2d");
  ctx.drawImage(img, 0, 0);
  // Get the data-URL formatted image
  // Firefox supports PNG and JPEG. You could check img.src to
  // guess the original format, but be aware that using "image/jpg"
  // will re-encode the image.
  var dataURL = canvas.toDataURL("image/png");
  return dataURL.replace(/^data:image\/(png|jpg);base64,/, "");
}

function base64ToArrayBuffer(string_base64) {
  var binary_string = window.atob(string_base64);
  var len = binary_string.length;
  var bytes = new Uint8Array(len);
  for (var i = 0; i < len; i++) {
    var ascii = binary_string.charCodeAt(i);
    bytes[i] = ascii;
  }
  return bytes.buffer;
}
Saving is started like this:
var img = $('#show-picture')[0];
var data = base64ToArrayBuffer(getBase64Image(img));
dropbox.client.writeFile(moment().format('YYYYMMDD-HH-mm-ss') + '.png', data, function (error, stat) {
  if (error) {
    return dropbox.handleError(error);
  }
  // The image has been successfully written.
});
The problem is that the saved file is corrupted, and I'm a bit confused about what's wrong.
EDIT
Here's the link to the original file: https://www.dropbox.com/s/ekyhvu2t6d8ldh3/original.PNG and here's the corrupted one: https://www.dropbox.com/s/f0oevj1z33brpur/20131219-22-23-14.png
I'm using this version of the dropbox.js: //cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.10.2/dropbox.min.js
As you can see, the corrupted file is slightly bigger: 23.3 KB vs 32.6 KB.
Thanks for any help
Larsi

Moving my comment to an answer, since it seems that this works in the latest Datastore JS SDK but perhaps not in dropbox.js 0.10.2.
What browser and what version of the Dropbox library? And what's wrong with the image that's saved? (I assume by "corrupted" you mean that it won't open in whatever tool you're using... any more hints? Is the file size reasonable?) I just did a very similar test (toDataURL, atob, and Uint8Array) with Chrome on OS X and dropbox.com/static/api/dropbox-datastores-1.0-latest.js, and it seems to work.
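One way to narrow it down (not from the original thread, just a hedged sanity check): wrap the ArrayBuffer in a Blob and preview it locally before uploading. If the preview renders, the conversion is fine and the corruption happens on the writeFile/SDK side.
var buffer = base64ToArrayBuffer(getBase64Image(img));
var blob = new Blob([buffer], { type: 'image/png' });
var url = URL.createObjectURL(blob);
var preview = document.createElement('img');
preview.onload = function () {
  // The bytes decode as a valid PNG, so the ArrayBuffer itself is intact.
  URL.revokeObjectURL(url);
};
preview.src = url;
document.body.appendChild(preview);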

Related

JS Upload image from Canvas without "toDataURL" method

I'm trying to upload an image from a canvas to the server. The common solution is to get data from the canvas using the toDataURL method, but unfortunately some images cause Chrome to crash. I tried the image/png and image/jpeg mime types, and I also tried reducing the quality for image/jpeg down to 0.4, but Chrome (version 45.0.2454.85 m) crashed anyway. Is there any way to extract the image from the canvas without this method, like retrieving a Blob object or something like that?
Your problem is that the images are too big, and Chrome fails with an out-of-memory crash. A possible solution is to reduce the quality of the images by returning them as JPEG.
var fullQuality = canvas.toDataURL("image/jpeg", 1.0);
var mediumQuality = canvas.toDataURL("image/jpeg", 0.5);
var lowQuality = canvas.toDataURL("image/jpeg", 0.1);
You can also try the image/webp format, which is specific to Chrome.
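For example (a small hedged check, relying on the fact that toDataURL silently falls back to PNG when the requested type isn't supported):
var webp = canvas.toDataURL("image/webp", 0.8);
// If the browser can't encode WebP, the result starts with "data:image/png" instead.
var supportsWebP = webp.indexOf("data:image/webp") === 0;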
Good luck.
EDIT
You've also got the toBlob() method available, if you need to change the logic:
https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob
var canvas = document.getElementById("canvas");
canvas.toBlob(function (blob) {
  var newImg = document.createElement("img"),
      url = URL.createObjectURL(blob);
  newImg.onload = function () {
    // no longer need to read the blob so it's revoked
    URL.revokeObjectURL(url);
  };
  newImg.src = url;
  document.body.appendChild(newImg);
});
EDIT 2
According to the MDN website, there is a polyfill available:
if (!HTMLCanvasElement.prototype.toBlob) {
  Object.defineProperty(HTMLCanvasElement.prototype, 'toBlob', {
    value: function (callback, type, quality) {
      var binStr = atob(this.toDataURL(type, quality).split(',')[1]),
          len = binStr.length,
          arr = new Uint8Array(len);
      for (var i = 0; i < len; i++) {
        arr[i] = binStr.charCodeAt(i);
      }
      callback(new Blob([arr], {type: type || 'image/png'}));
    }
  });
}
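Once you have a Blob (from toBlob or the polyfill above), it can be uploaded directly, so toDataURL never enters the picture. A minimal sketch, assuming a hypothetical /upload endpoint on your server:
canvas.toBlob(function (blob) {
  var form = new FormData();
  form.append("image", blob, "capture.jpg"); // sent as multipart/form-data
  var xhr = new XMLHttpRequest();
  xhr.open("POST", "/upload");
  xhr.onload = function () {
    console.log("upload finished with status", xhr.status);
  };
  xhr.send(form);
}, "image/jpeg", 0.5);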

Load file into IMAGE object using Phantom.js

I'm trying to load an image and put its data into an HTML Image element, but without success.
var fs = require("fs");
var content = fs.read('logo.png');
After reading the content of the file I have to convert it somehow to an Image, or just print it to a canvas. I tried to convert the binary data to a Base64 data URL with code I found on Stack Overflow.
function base64encode(binary) {
  return btoa(unescape(encodeURIComponent(binary)));
}
var base64Data = 'data:image/png;base64,' +base64encode(content);
console.log(base64Data);
The returned Base64 is not a valid data URL. I tried a few more approaches, but without success. Do you know the best (shortest) way to achieve this?
This is a rather ridiculous workaround, but it works. Keep in mind that PhantomJS's (1.x?) canvas is a bit broken, so the canvas.toDataURL function returns largely inflated encodings. The smallest that I found was, ironically, image/bmp.
function decodeImage(imagePath, type, callback) {
  var page = require('webpage').create();
  var htmlFile = imagePath + "_temp.html";
  fs.write(htmlFile, '<html><body><img src="' + imagePath + '"></body></html>');
  var possibleCallback = type;
  type = callback ? type : "image/bmp";
  callback = callback || possibleCallback;
  page.open(htmlFile, function () {
    page.evaluate(function (imagePath, type) {
      var img = document.querySelector("img");
      // the following is copied from http://stackoverflow.com/a/934925
      var canvas = document.createElement("canvas");
      canvas.width = img.width;
      canvas.height = img.height;
      // Copy the image contents to the canvas
      var ctx = canvas.getContext("2d");
      ctx.drawImage(img, 0, 0);
      // Get the data-URL formatted image
      // Firefox supports PNG and JPEG. You could check img.src to
      // guess the original format, but be aware that using "image/jpg"
      // will re-encode the image.
      window.dataURL = canvas.toDataURL(type);
    }, imagePath, type);
    fs.remove(htmlFile);
    var dataUrl = page.evaluate(function () {
      return window.dataURL;
    });
    page.close();
    callback(dataUrl, type);
  });
}
You can call it like this:
decodeImage('logo.png', 'image/png', function (imgB64Data, type) {
  //console.log(imgB64Data);
  console.log(imgB64Data.length);
  phantom.exit();
});
or this
decodeImage('logo.png', function (imgB64Data, type) {
  //console.log(imgB64Data);
  console.log(imgB64Data.length);
  phantom.exit();
});
I tried several things. I couldn't figure out the encoding of the file as returned by fs.read. I also tried to dynamically load the file into the about:blank DOM through file://-URLs, but that didn't work. I therefore opted to write a local html file to the disk and open it immediately.

How to convert Uint8ClampedArray to node-like Buffer;

I am working on a node module that uses xmlrpc to post images to WordPress. The postIMGRPC method accepts the raw image data and submits it via the wp.uploadFile URI. I have successfully tested the method in node using an image file:
var imgData;
fs.readFile("anyImage.jpg", function(e,d){ imgData = d } );
wp.postIMGRPC("anyImage.jpg", imgData, 1)
.then(function(r){console.log(r)})
Now, I would like to be able to grab the imgData of a canvas in client-side javascript, and submit it in the same fashion via my node.js module:
var c=document.getElementById("myCanvas");
var ctx=c.getContext("2d");
var img=document.getElementById("myImg");
ctx.drawImage(img,0,0);
var imgData = ctx.getImageData(0,0,250,300);
var data = imgData.data;
However, when I attempt to pass data (type Uint8ClampedArray) to my node module, the file uploaded is a blank image. I have tried:
wp.postIMGRPC("anyImage.jpg", imgDataFromJS, 1)
.then(function(r){console.log(r)})
as well as
buf = new Buffer(imgDataFromJS)
wp.postIMGRPC("anyImage.jpg", buf, 1)
.then(function(r){console.log(r)})
How do I convert the Uint8ClampedArray from imgData to a Buffer that is similar to the one returned by fs.readFile()?
Use Buffer.from, as in:
...
var imgData = ctx.getImageData(0,0,250,300);
var data = imgData.data;
var buf = Buffer.from(data);
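For clarity, Buffer.from copies the bytes out of any TypedArray, so the Uint8ClampedArray works directly (a toy example, not from the original answer):
// One pixel of raw RGBA data, as getImageData would produce it (pixel data, not an encoded file).
const clamped = new Uint8ClampedArray([255, 0, 0, 255]);
const buf = Buffer.from(clamped);
console.log(buf); // <Buffer ff 00 00 ff>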

Setting img.src to dataUrl Leaks Memory

Below I've created a simple test case that shows that when an img tag's src is set to different dataUrls, it leaks memory. It looks like the image data is never unloaded after the src is changed to something else.
<!DOCTYPE html>
<html>
<head>
  <title>Leak Test</title>
  <script type="text/javascript">
    canvas = null;
    context = null;
    image = null;

    onLoad = function(event)
    {
      canvas = document.getElementById('canvas');
      context = canvas.getContext('2d');
      image = document.getElementById('image');
      setTimeout(processImage, 1000);
    }

    processImage = function(event)
    {
      var imageData = null;
      for (var i = 0; i < 500; i++)
      {
        context.fillStyle = "rgba(" + Math.floor(Math.random() * 256) + "," + Math.floor(Math.random() * 256) + "," + Math.floor(Math.random() * 256) + "," + Math.random() + ")";
        context.fillRect(0, 0, canvas.width, canvas.height);
        imageData = canvas.toDataURL("image/jpeg", .5);
        image.src = imageData;
      }
      setTimeout(processImage, 1000);
    }
  </script>
</head>
<body onload="onLoad(event)">
  <canvas id="canvas"></canvas>
  <img id="image"></img>
</body>
</html>
If you load this html page, RAM usage builds over time and is never cleaned up. This issue looks very similar: Rapidly updating image with Data URI causes caching, memory leak . Is there anything I can do to prevent this memory leak?
I ended up doing a workaround for the issue. The memory bloat only happens when image.src is changed, so I just bypassed the Image object altogether. I did this by taking the dataUrl, converting it into binary (https://gist.github.com/borismus/1032746), then parsing it using jpg.js (https://github.com/notmasteryet/jpgjs). Using jpg.js I can then copy the image back to my canvas, so the Image element is completely bypassed, negating the need to set its src attribute.
Panchosoft's answer solved this for me in Safari.
This workaround avoids the memory increase by bypassing the leaking Image object.
// Methods to address the memory leak problems in Safari
var BASE64_MARKER = ';base64,';
var temporaryImage;
var objectURL = window.URL || window.webkitURL;

function convertDataURIToBlob(dataURI) {
  // Validate input data
  if (!dataURI) return;
  // Convert image (in base64) to binary data
  var base64Index = dataURI.indexOf(BASE64_MARKER) + BASE64_MARKER.length;
  var base64 = dataURI.substring(base64Index);
  var raw = window.atob(base64);
  var rawLength = raw.length;
  var array = new Uint8Array(new ArrayBuffer(rawLength));
  for (var i = 0; i < rawLength; i++) {
    array[i] = raw.charCodeAt(i);
  }
  // Create and return a new blob object using binary data
  return new Blob([array], {type: "image/jpeg"});
}
then, in the processImage rendering loop:
// Destroy old image
if(temporaryImage) objectURL.revokeObjectURL(temporaryImage);
// Create a new image from binary data
var imageDataBlob = convertDataURIToBlob(imageData);
// Create a new object URL
temporaryImage = objectURL.createObjectURL(imageDataBlob);
// Set the new image
image.src = temporaryImage;
I'm also experiencing this issue, and I do believe it's a browser bug. I see this happening in FF and Chrome as well. Chrome at least once had a similar bug that was supposedly fixed; I think it's not gone, or not completely gone. I see a constant increase in memory when I set img.src repeatedly to unique images. I have filed a bug with Chromium, if you want to put some weight behind it :)
https://code.google.com/p/chromium/issues/detail?id=309543&thanks=309543&ts=1382344039
(The bug-triggering example does not necessarily generate a new unique image every time around, but at least it does with high probability.)
Some solutions not mentioned in the other answers:
For browser
jpeg-js
Similar to jpgjs mentioned by @PaulMilham, but with additional features and a nicer API (imo).
For NodeJS/Electron
sharp
General purpose image-processing library for NodeJS, with functionality to both read and write jpeg, png, etc. images (as files, or just in memory).
Since my program is in Electron, I ended up using sharp, as jpeg-js mentioned it as a more performant alternative (due to its core being written in native code).
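As an illustration (a hedged sketch based on the jpeg-js README, not code from the thread), decoding JPEG bytes and painting the raw pixels straight onto a canvas looks roughly like this, so the leaking Image element is never involved. Here "jpegBytes" is assumed to be a Uint8Array of an encoded JPEG (e.g. from convertDataURIToBlob above):
const jpeg = require('jpeg-js'); // bundled for the browser, or used directly in Node/Electron

const decoded = jpeg.decode(jpegBytes, { useTArray: true }); // { width, height, data: RGBA bytes }
const canvas = document.createElement('canvas');
canvas.width = decoded.width;
canvas.height = decoded.height;
const pixels = new ImageData(new Uint8ClampedArray(decoded.data), decoded.width, decoded.height);
canvas.getContext('2d').putImageData(pixels, 0, 0);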
Setting the source to a fixed minimal dataURI after handling the image seems to fix the issue for me:
const dummyPng = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAAC0lEQVQYV2NgAAIAAAUAAarVyFEAAAAASUVORK5CYII=';
img.onload = () => {
  // ... process the image
  URL.revokeObjectURL(img.src);
  img.onload = null;
  img.src = dummyPng;
};
img.src = URL.createObjectURL(new window.Blob([new Uint8Array(data)], {type: 'image/png'}));

Converting an image to base64 using javascript [duplicate]

I have a regular HTML page with some images (just regular <img /> HTML tags). I'd like to get their content, base64 encoded preferably, without the need to redownload the image (ie. it's already loaded by the browser, so now I want the content).
I'd love to achieve that with Greasemonkey and Firefox.
Note: This only works if the image is from the same domain as the page, or has the crossOrigin="anonymous" attribute and the server supports CORS. It's also not going to give you the original file, but a re-encoded version. If you need the result to be identical to the original, see Kaiido's answer.
You will need to create a canvas element with the correct dimensions and copy the image data with the drawImage function. Then you can use the toDataURL function to get a data: url that has the base-64 encoded image. Note that the image must be fully loaded, or you'll just get back an empty (black, transparent) image.
It would be something like this. I've never written a Greasemonkey script, so you might need to adjust the code to run in that environment.
function getBase64Image(img) {
  // Create an empty canvas element
  var canvas = document.createElement("canvas");
  canvas.width = img.width;
  canvas.height = img.height;
  // Copy the image contents to the canvas
  var ctx = canvas.getContext("2d");
  ctx.drawImage(img, 0, 0);
  // Get the data-URL formatted image
  // Firefox supports PNG and JPEG. You could check img.src to
  // guess the original format, but be aware that using "image/jpg"
  // will re-encode the image.
  var dataURL = canvas.toDataURL("image/png");
  return dataURL.replace(/^data:image\/(png|jpg);base64,/, "");
}
Getting a JPEG-formatted image doesn't work on older versions (around 3.5) of Firefox, so if you want to support that, you'll need to check the compatibility. If the encoding is not supported, it will default to "image/png".
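A quick way to check support is a small helper (hypothetical name, relying on the documented fallback-to-PNG behavior of toDataURL):
function canExportAs(mimeType) {
  var canvas = document.createElement("canvas");
  canvas.width = canvas.height = 1;
  // toDataURL returns "data:image/png..." when the requested type isn't supported.
  return canvas.toDataURL(mimeType).indexOf("data:" + mimeType) === 0;
}
console.log(canExportAs("image/jpeg"));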
This function takes the URL and then returns the image as Base64:
function getBase64FromImageUrl(url) {
  var img = new Image();
  img.setAttribute('crossOrigin', 'anonymous');
  img.onload = function () {
    var canvas = document.createElement("canvas");
    canvas.width = this.width;
    canvas.height = this.height;
    var ctx = canvas.getContext("2d");
    ctx.drawImage(this, 0, 0);
    var dataURL = canvas.toDataURL("image/png");
    alert(dataURL.replace(/^data:image\/(png|jpg);base64,/, ""));
  };
  img.src = url;
}
Call it like this :
getBase64FromImageUrl("images/slbltxt.png")
Coming long after, but none of the answers here are entirely correct.
When drawn on a canvas, the passed image is uncompressed + all pre-multiplied.
When exported, it's uncompressed or recompressed with a different algorithm, and un-multiplied.
All browsers and devices will have different rounding errors happening in this process
(see Canvas fingerprinting).
So if one wants a base64 version of an image file, they have to request it again (most of the time it will come from cache) but this time as a Blob.
Then you can use a FileReader to read it either as an ArrayBuffer, or as a dataURL.
function toDataURL(url, callback) {
  var xhr = new XMLHttpRequest();
  xhr.open('get', url);
  xhr.responseType = 'blob';
  xhr.onload = function () {
    var fr = new FileReader();
    fr.onload = function () {
      callback(this.result);
    };
    fr.readAsDataURL(xhr.response); // async call
  };
  xhr.send();
}
toDataURL(myImage.src, function (dataURL) {
  result.src = dataURL;
  // now just to show that passing to a canvas doesn't hold the same results
  var canvas = document.createElement('canvas');
  canvas.width = myImage.naturalWidth;
  canvas.height = myImage.naturalHeight;
  canvas.getContext('2d').drawImage(myImage, 0, 0);
  console.log(canvas.toDataURL() === dataURL); // false - not same data
});
<img id="myImage" src="https://dl.dropboxusercontent.com/s/4e90e48s5vtmfbd/aaa.png" crossOrigin="anonymous">
<img id="result">
A more modern version of Kaiido's answer using fetch would be:
function toObjectUrl(url) {
  return fetch(url)
    .then((response) => {
      return response.blob();
    })
    .then(blob => {
      return URL.createObjectURL(blob);
    });
}
https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch
Edit: As pointed out in the comments, this will return an object URL, which points to a file in your local system instead of an actual data URL, so depending on your use case this might not be what you need.
You can look at the following answer to use fetch and an actual dataURL: https://stackoverflow.com/a/50463054/599602
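For reference, the shape of that approach (a hedged sketch combining the fetch pipeline above with a FileReader, so the promise resolves with a real data: URL):
function toDataURL(url) {
  return fetch(url)
    .then(response => response.blob())
    .then(blob => new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onloadend = () => resolve(reader.result); // "data:image/...;base64,..."
      reader.onerror = reject;
      reader.readAsDataURL(blob);
    }));
}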
shiv / shim / sham
If your image(s) are already loaded (or not), this "tool" may come in handy:
Object.defineProperty(HTMLImageElement.prototype, 'toDataURL', {
  enumerable: false,
  configurable: false,
  writable: false,
  value: function (m, q) {
    let c = document.createElement('canvas');
    c.width = this.naturalWidth;
    c.height = this.naturalHeight;
    c.getContext('2d').drawImage(this, 0, 0);
    return c.toDataURL(m, q);
  }
});
.. but why?
This has the advantage of using the "already loaded" image data, so no extra request is needed. Additionally, it lets the end user (a programmer like you) decide the CORS and/or mime type and quality, or you can leave out these arguments/parameters as described in the MDN documentation here.
If you have this JS loaded (prior to when it's needed), then converting to dataURL is as simple as:
examples
HTML
<img src="/yo.jpg" onload="console.log(this.toDataURL('image/jpeg'))">
JS
console.log(document.getElementById("someImgID").toDataURL());
GPU fingerprinting
If you are concerned about the "preciseness" of the bits then you can alter this tool to suit your needs as provided by #Kaiido's answer.
It's 2022; I prefer to use the modern createImageBitmap() instead of the onload event.
Note: the image should be same-origin or CORS-enabled.
async function imageToDataURL(imageUrl) {
  let img = await fetch(imageUrl);
  img = await img.blob();
  let bitmap = await createImageBitmap(img);
  let canvas = document.createElement("canvas");
  let ctx = canvas.getContext("2d");
  canvas.width = bitmap.width;
  canvas.height = bitmap.height;
  ctx.drawImage(bitmap, 0, 0, bitmap.width, bitmap.height);
  return canvas.toDataURL("image/png");
  // image compression?
  // return canvas.toDataURL("image/png", 0.9);
};

(async () => {
  let dataUrl = await imageToDataURL('https://en.wikipedia.org/static/images/project-logos/enwiki.png');
  wikiImg.src = dataUrl;
  console.log(dataUrl);
})();
<img id="wikiImg">
Use the onload event to convert the image after it loads:
function loaded(img) {
  let c = document.createElement('canvas')
  // Size the canvas to the image; otherwise the default 300x150 canvas crops or pads it
  c.width = img.naturalWidth
  c.height = img.naturalHeight
  c.getContext('2d').drawImage(img, 0, 0)
  msg.innerText = c.toDataURL();
}
pre { word-wrap: break-word; width: 500px; white-space: pre-wrap; }
<img onload="loaded(this)" src="https://cors-anywhere.herokuapp.com/http://lorempixel.com/200/140" crossorigin="anonymous"/>
<pre id="msg"></pre>
This is all you need to read.
https://developer.mozilla.org/en-US/docs/Web/API/FileReader/readAsBinaryString
var height = 200;
var width = 200;
canvas.width = width;
canvas.height = height;
var ctx = canvas.getContext('2d');
ctx.strokeStyle = '#090';
ctx.beginPath();
ctx.arc(width/2, height/2, width/2 - width/10, 0, Math.PI*2);
ctx.stroke();
canvas.toBlob(function (blob) {
  //consider blob is your file object
  var reader = new FileReader();
  reader.onload = function () {
    console.log(reader.result);
  }
  reader.readAsBinaryString(blob);
});
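If you want raw bytes rather than a binary string, the same shape works with readAsArrayBuffer (a hedged variant, not part of the original answer):
canvas.toBlob(function (blob) {
  var reader = new FileReader();
  reader.onload = function () {
    var bytes = new Uint8Array(reader.result); // the encoded PNG bytes
    console.log(bytes.length);
  };
  reader.readAsArrayBuffer(blob);
});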
In HTML5, it's better to use this:
{
  //...
  canvas.width = img.naturalWidth; //img.width;
  canvas.height = img.naturalHeight; //img.height;
  //...
}
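The difference only shows up when CSS (or attributes) scale the image; a small illustration:
var img = document.querySelector("img");
// width/height reflect the rendered (possibly CSS-scaled) size,
// naturalWidth/naturalHeight are the intrinsic pixel dimensions of the file.
console.log(img.width, img.height);
console.log(img.naturalWidth, img.naturalHeight);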
