I have this method that grabs an image before it's saved to a parse.com database and scales it down.
Take a look at the code:
var Image = require("parse-image"); // module

Parse.Cloud.beforeSave("Garments", function(request, response) {
    Parse.Cloud.httpRequest({
        url: request.object.get("image").url()
    }).then(function(response) {
        var image = new Image();
        return image.setData(response.buffer);
    }).then(function(image) {
        // Resize the image.
        return image.scale({
            width: 300,
            height: 450
        });
    }).then(function(image) {
        // Make sure it's a JPEG to save disk space and bandwidth.
        return image.setFormat("JPEG");
    }).then(function(image) {
        // Get the image data in a Buffer.
        return image.data();
    }).then(function(buffer) {
        // Save the image into a new file.
        var base64 = buffer.toString("base64");
        var cropped = new Parse.File("image.jpg", { base64: base64 });
        return cropped.save();
    }).then(function(cropped) {
        // Attach the image file to the original object.
        request.object.set("image", cropped);
    }).then(function(result) {
        response.success();
    }, function(error) {
        response.error(error);
    });
});
Question:
Is it possible to do the above for 5 more images?
I have 6 image columns altogether.
image, image2, image3, image4, image5, image6
A row will never exist without the "image" column being populated; the other images are optional. So I always need to scale "image", and if image2, image3, image4, image5, or image6 don't exist, no errors should be thrown. If they do exist, they should be scaled down too.
I'm sitting here scratching my head trying to come up with an efficient way to code this. I'd really appreciate it if a JavaScript expert could come up with something; repeating this code several more times doesn't feel efficient at all.
Thanks for your time
Turn most of that code into a function that returns the final promise, then use Parse.Promise.when() to wait for an array of promises to finish. Here's a bit to get you started:
var imagePromises = [];
var garment = request.object;

// you said "image" is always populated, so always add it
imagePromises.push(createImagePromise(garment, "image", garment.get("image").url()));

// now conditionally add the other promises, using a loop to further reduce repeated code
for (var i = 2; i < 7; i++) {
    var imageColumn = "image" + i;
    if (garment.get(imageColumn) && garment.get(imageColumn).url()) {
        imagePromises.push(createImagePromise(garment, imageColumn, garment.get(imageColumn).url()));
    }
}

// now we have all the promises, wait for them all to finish before we're done
Parse.Promise.when(imagePromises).then(function () {
    response.success();
}, function (error) {
    response.error(error);
});
The last piece is to write the createImagePromise() function.
function createImagePromise(garment, imageColumn, url) {
    // we want to return the promise
    return Parse.Cloud.httpRequest({
        url: url
    }).then(function (response) {
        // ... etc ...
    }).then(function (cropped) {
        // Attach the image file to the original object.
        garment.set(imageColumn, cropped);
    });
}
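Filling in the elided middle section with the steps from the question's own chain, the full helper might look something like this (an untested sketch; naming the saved file after the column is just a suggestion, the original used "image.jpg"):
function createImagePromise(garment, imageColumn, url) {
    return Parse.Cloud.httpRequest({
        url: url
    }).then(function (response) {
        // Load the downloaded bytes into a parse-image Image.
        var image = new Image();
        return image.setData(response.buffer);
    }).then(function (image) {
        // Resize the image.
        return image.scale({ width: 300, height: 450 });
    }).then(function (image) {
        // Make sure it's a JPEG to save disk space and bandwidth.
        return image.setFormat("JPEG");
    }).then(function (image) {
        // Get the image data in a Buffer.
        return image.data();
    }).then(function (buffer) {
        // Save the scaled image into a new Parse.File.
        var base64 = buffer.toString("base64");
        var cropped = new Parse.File(imageColumn + ".jpg", { base64: base64 });
        return cropped.save();
    }).then(function (cropped) {
        // Attach the scaled file to the matching column.
        garment.set(imageColumn, cropped);
    });
}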
NOTE:
There is a limit to how long this is allowed to run: a beforeSave handler only has 3 seconds before it gets terminated, which might not be long enough to process 6 images.
Related
I'm trying to load paths from different SVG files and keep them in an array. I don't really want them in the canvas, since I'm mostly using them to create new paths. However, from time to time I do want to actually add them to the canvas to be rendered.
Here's a code snippet that shows my problem:
var canvas = new fabric.Canvas('c', {
    backgroundColor: 'skyblue'
});
canvas.renderAll();

var orig_shapes = []; // I want to store the paths here

function loadShape(url){
    fabric.loadSVGFromURL(url, function(paths, options){
        let shape = paths[0];
        orig_shapes.push(shape);
    });
}

loadShape('path0.svg');
loadShape('path1.svg');

// Add all the loaded shapes to the canvas
console.log(orig_shapes.length);
orig_shapes.forEach(function(shape, index, arr) {
    canvas.add(orig_shapes[index]);
});
canvas.renderAll();
This script is loaded at the end of the <body> element inside the web page.
I expected to see the paths rendered and a 2 in the console. Unfortunately, all I get is a blue background and a 0.
Despite this, if I check orig_shapes.length inside the console, I do get 2 back; so apparently the paths are eventually pushed to the array (just not when I need them). I can even add the paths to the canvas by writing canvas.add(orig_shapes[i]) in the console, and they are rendered with no problems.
So what's the problem? Why isn't this working as expected?
As Durga commented, loadSVGFromURL is an asynchronous function, so I needed to check first that the SVGs had been loaded. Here's a version of the script that uses Promises to make sure of this:
var canvas = new fabric.Canvas('c', {
    backgroundColor: 'skyblue'
});
canvas.renderAll();

var orig_shapes = [];

function loadShape(url){
    return new Promise(function(resolve, reject){
        fabric.loadSVGFromURL(url, function(paths, options){
            let shape = paths[0];
            orig_shapes.push(shape);
            resolve(shape);
        });
    });
}

function loadShapes(urls, shape_callback) {
    let promises = [];
    urls.forEach(function(url) {
        promises.push(loadShape(url).then(shape_callback));
    });
    return Promise.all(promises);
}

function addToCanvas(object) {
    canvas.add(object);
}

loadShapes(['path0.svg', 'path1.svg'])
    .then(function(objs) {
        objs.forEach(addToCanvas);
    });
loadShape and loadShapes let me set specific actions to execute when a single shape is loaded and when all shapes have been loaded, respectively.
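For example, a small usage sketch (untested) that passes a per-shape callback as the second argument:
// Render each shape as soon as it loads; the returned promise
// resolves once all of them have been pushed to orig_shapes.
loadShapes(['path0.svg', 'path1.svg'], addToCanvas)
    .then(function () {
        console.log(orig_shapes.length + ' shapes loaded');
        canvas.renderAll();
    });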
I am working with pngjs and many of its methods. Most of the time they work fine; however, as in the following example, I sometimes get an error: "Stream is not writable"
var fs = require('fs'),
    PNG = require('pngjs').PNG;

var dst = new PNG({width: 100, height: 50});

fs.createReadStream('http://1.1m.yt/hry7Eby.png') // download this picture in order to examine the code
    .pipe(new PNG())
    .on('parsed', function(data) {
        console.log(data);
    });
This case is not an isolated one: I get this error on a random PNG image about once a day, across all of pngjs's methods, and the error obviously crashes my app.
(Note: you can't use the http link I gave you with a read stream directly; you will have to download and rename it, then do something like:)
fs.createReadStream('1.png')
Thank you for your time and effort.
This seems to be a bug in the library, though I'm wary of saying so as I'm no expert in PNGs. The parser seems to complete while the stream is still writing. It encounters the IEND, and so calls this:
ParserAsync.prototype._finished = function() {
    if (this.errord) {
        return;
    }
    if (!this._inflate) {
        this.emit('error', 'No Inflate block');
    }
    else {
        // no more data to inflate
        this._inflate.end();
    }
    this.destroySoon();
};
If you comment out the this.destroySoon(); line, the image finishes correctly instead of eventually calling this function:
ChunkStream.prototype.end = function(data, encoding) {
    if (data) {
        this.write(data, encoding);
    }
    this.writable = false;

    // already destroyed
    if (!this._buffers) {
        return;
    }

    // enqueue or handle end
    if (this._buffers.length === 0) {
        this._end();
    }
    else {
        this._buffers.push(null);
        this._process();
    }
};
...which would otherwise end up setting stream.writable to false or, if you comment that out as well, pushing a null value into the _buffers array and breaking ChunkStream._processRead.
I'm fairly certain this is a timing problem between when the zlib parser completes and when the stream completes, since if you do the whole thing synchronously it works fine:
var data = fs.readFileSync('pic.png');
var png = PNG.sync.read(data);
var buff = PNG.sync.write(png);
fs.writeFileSync('out2.png', buff);
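Since the error was crashing the app, one defensive option is to wrap that synchronous API in a try/catch so a bad PNG is logged instead of being fatal. A minimal sketch, assuming local file paths (the file names and helper name are placeholders):
var fs = require('fs'),
    PNG = require('pngjs').PNG;

function resaveSync(inputPath, outputPath) {
    try {
        // Read, decode, and re-encode the PNG entirely synchronously,
        // sidestepping the stream timing issue described above.
        var data = fs.readFileSync(inputPath);
        var png = PNG.sync.read(data);
        var buff = PNG.sync.write(png);
        fs.writeFileSync(outputPath, buff);
        return true;
    } catch (err) {
        // A bad or truncated PNG now logs an error instead of crashing the app.
        console.error('Failed to process ' + inputPath + ':', err);
        return false;
    }
}

resaveSync('pic.png', 'out2.png');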
So in my JS code, the user is making mosaic images. When they press upload, I create a new object and all is good; my code works perfectly. I then save the id of the object that was just uploaded. If the user presses the upload button again, I want to simply reuse that same object id, clear out the files that were saved before, and use the newest version. All of my saves and updates appear to be working just fine. When I go to the data browser, though, I see the files, but when I click one the link is broken (which doesn't seem like it should even be possible). 95% of my code is below; there is some code outside of it to generate the mosaic image and to store the returned id. How can I avoid these broken links?
var ins_file = new Parse.File("Instructions.png", { base64: img_ins_InBase64 }, "image/png");
var pretty_file = new Parse.File("Pretty.png", { base64: img_pretty_InBase64 }, "image/png");

ins_file.save().then(function () {
}, function (error) {
    console.log("Instruction File couldn't be saved")
    // The file either could not be read, or could not be saved to Parse.
});

pretty_file.save().then(function () {
}, function (error) {
    console.log("Mosaic File couldn't be saved")
    // The file either could not be read, or could not be saved to Parse.
});

var mosaicClass = Parse.Object.extend("Mosaics");
var mosaicObj = new Parse.Object("Mosaics");
var query = new Parse.Query(mosaicClass);

query.get(parseId, {
    success: function (objToUpdate) {
        // The object was retrieved successfully. SIMPLY UPDATE
        objToUpdate.set("img_ins", ins_file);
        objToUpdate.set("img_pretty", pretty_file);
        objToUpdate.save().then(function () {
            console.log("Initial Image updated");
            var ins_img_url = objToUpdate.get('img_ins').url();
            var pretty_img_url = objToUpdate.get('img_pretty').url();
            objToUpdate.set("img_ins_url", ins_img_url);
            objToUpdate.set("img_pretty_url", pretty_img_url);
            objToUpdate.save();
            console.log("Mosaic updated, id was: " + objToUpdate.id);
            parseId = objToUpdate.id;
        }, function (error) {
            console.log("File couldn't be updated")
            // The file either could not be read, or could not be saved to Parse.
        });
    },
    error: function (object, error) {
        // The object was not retrieved successfully.
        // parseId was null so make a new thing
        mosaicObj.set("img_ins", ins_file);
        mosaicObj.set("img_pretty", pretty_file);
        mosaicObj.save().then(function () {
            console.log("Initial Images uploaded");
            var ins_img_url = mosaicObj.get('img_ins').url();
            var pretty_img_url = mosaicObj.get('img_pretty').url();
            mosaicObj.set("img_ins_url", ins_img_url);
            mosaicObj.set("img_pretty_url", pretty_img_url);
            mosaicObj.save();
            console.log("Mosaic Saved, id was: " + mosaicObj.id);
            parseId = mosaicObj.id;
        }, function (error) {
            console.log("File couldn't be saved")
            // The file either could not be read, or could not be saved to Parse.
        });
    }
});
It appears you're not waiting for the files to save before trying to assign them to a parse object. Async JS will get you every time.
You should only set the file objects on the Parse object after they have saved, inside the then callback. You can also put both file saves in an array and use Parse.Promise.when([promise1, promise2]).then(...); to wait for both at once.
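A rough sketch of that ordering for the update branch (untested; objToUpdate is the object fetched in the question's success callback):
// Save both files first, and only then attach them to the Mosaics object.
Parse.Promise.when([ins_file.save(), pretty_file.save()]).then(function () {
    objToUpdate.set("img_ins", ins_file);
    objToUpdate.set("img_pretty", pretty_file);
    return objToUpdate.save();
}).then(function (saved) {
    // The file URLs are only valid once the files themselves have saved.
    saved.set("img_ins_url", saved.get("img_ins").url());
    saved.set("img_pretty_url", saved.get("img_pretty").url());
    return saved.save();
}).then(function (saved) {
    parseId = saved.id;
}, function (error) {
    console.log("Save failed: " + error.message);
});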
I'm in the process of creating a site that preloads several large GIFs. Due to the size of the images, I need them all to be loaded before they are displayed to the user. In the past I have done this numerous times using something basic like this:
var image = new Image();
image.onload = function () { document.body.appendChild(image); };
image.src = '/myimage.jpg';
However, I'm loading a group of images from an array that contains the image source URLs. It should show a loading message and, once they have all loaded, perform a callback and hide the loading message, etc.
The code I've been using is below:
var images = ['image1.gif', 'image2.gif', 'image3.gif'];

function preload_images (target, callback) {
    // get feedback container
    var feedback = document.getElementById('feedback');
    // show feedback (loading message)
    feedback.style.display = 'block';
    // set target
    var target = document.getElementById(target);
    // clear html of target incase they refresh (tmp fix)
    target.innerHTML = '';
    // internal counter var
    var counter = 0;
    // image containers attach to window
    var img = new Array();
    // loop images
    if (images.length > 0) {
        for (var i in images) {
            // new image object
            img[i] = new Image();
            // when ready peform certain actions.
            img[i].onload = (function (value) {
                // append to container
                target.appendChild(img[value]);
                // hide all images apart from the first image
                if (value > 0) {
                    hide(img[value]);
                }
                // increment counter
                ++counter;
                // on counter at correct value use callback!
                if (counter == images.length) {
                    // hide feedback (loading message)
                    feedback.style.display = 'none';
                    if (callback) {
                        callback(); // when ready do callback!
                    }
                }
            })(i);
            // give image alt name
            img[i].alt = 'My Image ' + i;
            // give image id
            img[i].id = 'my_image_' + i
            // preload src
            img[i].src = images[i];
        }//end loop
    }//endif length
}//end preload image
It's really weird; I'm sure it should just work, but it doesn't even show my loading message, it just goes straight to the callback. I'm sure it must be something simple; I've been looking at it for ages and finding it a tad hard to narrow down.
I've been looking over stackoverflow and people have had similar problems and I've tried the solutions without much luck.
Any help would be greatly appreciated! I'll post more code if needed.
Cheers!
If I'm not totally wrong, the problem is with your assignment to
// when ready peform certain actions.
img[i].onload = (function (value) {...})(i);
Here you instantly invoke the function and assign its return value (undefined) to the onload attribute, so there is nothing left to call when the image is loaded.
To have access to the value of 'i' when the image is loaded, you can try something like the following:
img[i].onload = (function (val) {
    var temp = val;
    return function () {
        var value = temp; // the index captured when the handler was created
        // your code here, using value instead of i
    };
})(i);
This stores the value in temp and returns a callable function that can access it later.
I did not test whether it works and there may be a better solution, but this one came to mind :)
Try this for your onload callback:
img[i].onload = function(event) {
    target.appendChild(this);
    if (img.indexOf(this) > 0) {
        hide(this);
    }
    // ...
};
Hope you can get it working! It's bed time for me though.
Edit: You'll probably have to do something about img.indexOf(this)... I just realized you are using an associative array for img. In your original code, I don't think comparing value to 0 is logical in that case either, since value is a string. Perhaps you shouldn't use an associative array?
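For what it's worth, a plain numeric for loop with a closure avoids both issues. A rough sketch reusing the variables from the question's code (untested):
for (var i = 0; i < images.length; i++) {
    (function (index) {
        var image = new Image();
        image.onload = function () {
            target.appendChild(image);
            // index is a real number here, so comparing against 0 works
            if (index > 0) {
                hide(image);
            }
            if (++counter === images.length) {
                feedback.style.display = 'none';
                if (callback) {
                    callback();
                }
            }
        };
        image.alt = 'My Image ' + index;
        image.id = 'my_image_' + index;
        image.src = images[index];
        img[index] = image;
    })(i);
}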
I am building a slideshow with a few hundred images and would like to build a nice loading bar, so the idea was to preload the images using JavaScript, then initialize the rest of the UI afterwards.
Preloading the images is not a problem, but getting the browser to update the status as things load is. I've tried a few things, but the browser will only repaint the display after it finishes.
I've even tried the script from this question, but I get the same results.
Here's what I've got so far (imgList is an array of filenames. I'm using Prototype.)
var imageBuf = []
var loadCount = 0

$('loadStatus').update("0/"+imgList.length)

function init() {
    imgList.each(function(element){
        imageBuf[element] = new Image()
        //imageBuf[element].onload = window.setTimeout("count()",0) // gives "not implemented" error in IE
        imageBuf[element].onload = function(){count()}
        imageBuf[element].src = "thumbs/"+element
    })
}

function count() {
    loadCount++
    $('loadStatus').update(loadCount+"/"+imgList.length)
}

init()
Try using the function from my answer to this question:
function incrementallyProcess(workerCallback, data, chunkSize, timeout, completionCallback) {
    var itemIndex = 0;
    (function() {
        var remainingDataLength = (data.length - itemIndex);
        var currentChunkSize = (remainingDataLength >= chunkSize) ? chunkSize : remainingDataLength;
        if(itemIndex < data.length) {
            while(currentChunkSize--) {
                workerCallback(data[itemIndex++]);
            }
            setTimeout(arguments.callee, timeout);
        } else if(completionCallback) {
            completionCallback();
        }
    })();
}

// here we are using the above function to take
// a short break every time we load an image
function init() {
    incrementallyProcess(function(element) {
        imageBuf[element] = new Image();
        imageBuf[element].onload = function(){count()};
        imageBuf[element].src = "thumbs/"+element;
    }, imgList, 1, 250, function() {
        alert("done loading");
    });
}
You may want to modify the chunk size parameter as well as the length of the timeout to get it to behave just like you want it to. I am not 100% sure this will work for you, but it is worth a try...
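For reference, the chunk size and timeout are the third and fourth arguments, so tuning them is just a matter of changing those two values (the numbers here are examples only):
// Same worker as above, but processing 5 images per chunk
// with a 100 ms pause between chunks.
incrementallyProcess(function(element) {
    imageBuf[element] = new Image();
    imageBuf[element].onload = function(){count()};
    imageBuf[element].src = "thumbs/"+element;
}, imgList, 5, 100, function() {
    alert("done loading");
});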